[Rollup] Remove builders from TermsGroupConfig #32507

Merged
GroupConfig.java
@@ -23,6 +23,8 @@
import java.util.Objects;
import java.util.Set;

import static java.util.Arrays.asList;

/**
* The configuration object for the groups section in the rollup config.
* Basically just a wrapper for histo/date histo/terms objects
@@ -50,7 +52,7 @@ public class GroupConfig implements Writeable, ToXContentObject {
static {
PARSER.declareObject(GroupConfig.Builder::setDateHisto, (p,c) -> DateHistoGroupConfig.PARSER.apply(p,c).build(), DATE_HISTO);
PARSER.declareObject(GroupConfig.Builder::setHisto, (p,c) -> HistoGroupConfig.PARSER.apply(p,c).build(), HISTO);
PARSER.declareObject(GroupConfig.Builder::setTerms, (p,c) -> TermsGroupConfig.PARSER.apply(p,c).build(), TERMS);
PARSER.declareObject(GroupConfig.Builder::setTerms, (p,c) -> TermsGroupConfig.fromXContent(p), TERMS);
}

private GroupConfig(DateHistoGroupConfig dateHisto, @Nullable HistoGroupConfig histo, @Nullable TermsGroupConfig terms) {
@@ -84,7 +86,7 @@ public Set<String> getAllFields() {
fields.addAll(histo.getAllFields());
}
if (terms != null) {
fields.addAll(terms.getAllFields());
fields.addAll(asList(terms.getFields()));
}
return fields;
}
@@ -100,7 +102,6 @@ public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCa
}
}


@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@@ -113,9 +114,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.endObject();
}
if (terms != null) {
builder.startObject(TERMS.getPreferredName());
terms.toXContent(builder, params);
builder.endObject();
builder.field(TERMS.getPreferredName(), terms);
}
builder.endObject();
return builder;
@@ -194,4 +193,4 @@ public GroupConfig build() {
return new GroupConfig(dateHisto, histo, terms);
}
}
}
}
TermsGroupConfig.java
@@ -12,9 +12,10 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
@@ -24,13 +25,13 @@

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
* The configuration object for the histograms in the rollup config
*
@@ -42,27 +43,39 @@
* ]
* }
*/
public class TermsGroupConfig implements Writeable, ToXContentFragment {
private static final String NAME = "term_group_config";
public static final ObjectParser<TermsGroupConfig.Builder, Void> PARSER = new ObjectParser<>(NAME, TermsGroupConfig.Builder::new);
public class TermsGroupConfig implements Writeable, ToXContentObject {
Contributor: The other configs should also implement ToXContentObject and change their toXContent but let's do that in a follow up.

Member Author: I agree and I'll take care of this if that's OK. I already have most of the changes in a local branch.

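As a rough illustration of that follow-up, the same pattern could look like this on another group config. The class name, constants, fields, and parser wiring below are assumptions for the sketch, not code from this PR:

```java
// Hypothetical sketch, mirroring the TermsGroupConfig change in this PR:
// implement ToXContentObject so callers can emit the config with
// builder.field(name, config), and expose a static fromXContent(...)
// instead of a Builder-based parser.
public class HistoGroupConfig implements Writeable, ToXContentObject {

    // ... fields, constructor, and ConstructingObjectParser elided ...

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        {
            builder.field(INTERVAL, interval); // assumed constant and field names
            builder.field(FIELDS, fields);
        }
        return builder.endObject();
    }

    public static HistoGroupConfig fromXContent(final XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
```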

private static final String NAME = "terms";
private static final String FIELDS = "fields";

private static final ParseField FIELDS = new ParseField("fields");
private static final List<String> FLOAT_TYPES = Arrays.asList("half_float", "float", "double", "scaled_float");
private static final List<String> NATURAL_TYPES = Arrays.asList("byte", "short", "integer", "long");
private final String[] fields;

private static final ConstructingObjectParser<TermsGroupConfig, Void> PARSER;
static {
PARSER.declareStringArray(TermsGroupConfig.Builder::setFields, FIELDS);
PARSER = new ConstructingObjectParser<>(NAME, args -> {
@SuppressWarnings("unchecked") List<String> fields = (List<String>) args[0];
return new TermsGroupConfig(fields != null ? fields.toArray(new String[fields.size()]) : null);
});
PARSER.declareStringArray(constructorArg(), new ParseField(FIELDS));
}

private TermsGroupConfig(String[] fields) {
private final String[] fields;

public TermsGroupConfig(final String... fields) {
if (fields == null || fields.length == 0) {
throw new IllegalArgumentException("Fields must have at least one value");
}
this.fields = fields;
}

TermsGroupConfig(StreamInput in) throws IOException {
fields = in.readStringArray();
}

/**
* @return the names of the fields. Never {@code null}.
*/
public String[] getFields() {
return fields;
}
@@ -72,10 +85,6 @@ public String[] getFields() {
* set of date histograms. Used by the rollup indexer to iterate over historical data
*/
public List<CompositeValuesSourceBuilder<?>> toBuilders() {
if (fields.length == 0) {
return Collections.emptyList();
}

return Arrays.stream(fields).map(f -> {
TermsValuesSourceBuilder vsBuilder
= new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME));
@@ -94,14 +103,6 @@ public Map<String, Object> toAggCap() {
return map;
}

public Map<String, Object> getMetadata() {
return Collections.emptyMap();
}

Contributor: Just a note: this is a no-op for Terms, but the other groups (date, histo) use it to store the interval. So we'll need to preserve that functionality somehow/somewhere for the other groups.

Member Author: I agree, we'll keep this for the other groups.
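As a reference for that note, a minimal sketch of the kind of metadata the date/histo groups keep returning. The key string and field name are placeholders, not code from this diff (the real configs build the key via RollupField helpers):

```java
// Hypothetical: date/histo groups carry their interval through getMetadata(),
// which is why the method can only be dropped for the terms group.
public Map<String, Object> getMetadata() {
    return Collections.singletonMap("interval", interval.toString());
}
```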

public Set<String> getAllFields() {
return Arrays.stream(fields).collect(Collectors.toSet());
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

@@ -138,8 +139,11 @@ public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCa

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(FIELDS.getPreferredName(), fields);
return builder;
builder.startObject();
{
builder.field(FIELDS, fields);
}
return builder.endObject();
}

@Override
@@ -148,18 +152,15 @@ public void writeTo(StreamOutput out) throws IOException {
}

@Override
public boolean equals(Object other) {
public boolean equals(final Object other) {
if (this == other) {
return true;
}

if (other == null || getClass() != other.getClass()) {
return false;
}

TermsGroupConfig that = (TermsGroupConfig) other;

return Arrays.equals(this.fields, that.fields);
final TermsGroupConfig that = (TermsGroupConfig) other;
return Arrays.equals(fields, that.fields);
}

@Override
@@ -172,23 +173,7 @@ public String toString() {
return Strings.toString(this, true, true);
}

public static class Builder {
private List<String> fields;

public List<String> getFields() {
return fields;
}

public TermsGroupConfig.Builder setFields(List<String> fields) {
this.fields = fields;
return this;
}

public TermsGroupConfig build() {
if (fields == null || fields.isEmpty()) {
throw new IllegalArgumentException("Parameter [" + FIELDS + "] must have at least one value.");
}
return new TermsGroupConfig(fields.toArray(new String[0]));
}
Contributor: We should also remove the other builders to be consistent. WDYT @polyfractal ? I can do that in a follow up.

Member Author: I agree and I'll take care of this if that's OK. I already have most of the changes in a local branch.

Contributor: go for it, thanks @tlrx !
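For illustration, a short usage sketch of the builder-free API this diff introduces. The field names are made up:

```java
// Construction now goes straight through the varargs constructor...
TermsGroupConfig terms = new TermsGroupConfig("product", "category");
String[] fields = terms.getFields();   // ["product", "category"]

// ...and an empty field list fails fast instead of failing at build() time:
// new TermsGroupConfig() -> IllegalArgumentException("Fields must have at least one value")

// Parsing no longer needs PARSER.apply(p, c).build():
// TermsGroupConfig parsed = TermsGroupConfig.fromXContent(parser);
```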

public static TermsGroupConfig fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
ConfigTestHelpers.java
@@ -17,9 +17,13 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween;

public class ConfigTestHelpers {

public static RollupJobConfig.Builder getRollupJob(String jobId) {
@@ -49,7 +53,7 @@ public static GroupConfig.Builder getGroupConfig() {
groupBuilder.setHisto(getHisto().build());
}
if (ESTestCase.randomBoolean()) {
groupBuilder.setTerms(getTerms().build());
groupBuilder.setTerms(randomTermsGroupConfig(ESTestCase.random()));
}
return groupBuilder;
}
@@ -105,12 +109,6 @@ public static HistoGroupConfig.Builder getHisto() {
return histoBuilder;
}

public static TermsGroupConfig.Builder getTerms() {
TermsGroupConfig.Builder builder = new TermsGroupConfig.Builder();
builder.setFields(getFields());
return builder;
}

public static List<String> getFields() {
return IntStream.range(0, ESTestCase.randomIntBetween(1, 10))
.mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10))
@@ -126,4 +124,21 @@ public static String getCronString() {
" ?" + //day of week
" " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(1970, 2199))); //year
}

public static TermsGroupConfig randomTermsGroupConfig(final Random random) {
return new TermsGroupConfig(randomFields(random));
}

private static String[] randomFields(final Random random) {
final int numFields = randomIntBetween(random, 1, 10);
final String[] fields = new String[numFields];
for (int i = 0; i < numFields; i++) {
fields[i] = randomField(random);
}
return fields;
}

private static String randomField(final Random random) {
return randomAsciiAlphanumOfLengthBetween(random, 5, 10);
}
}
TermsGroupConfigSerializingTests.java
@@ -11,27 +11,23 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCase<TermsGroupConfig> {

private static final List<String> FLOAT_TYPES = Arrays.asList("half_float", "float", "double", "scaled_float");
private static final List<String> NATURAL_TYPES = Arrays.asList("byte", "short", "integer", "long");

@Override
protected TermsGroupConfig doParseInstance(XContentParser parser) throws IOException {
return TermsGroupConfig.PARSER.apply(parser, null).build();
return TermsGroupConfig.fromXContent(parser);
}

@Override
@@ -41,58 +37,48 @@ protected Writeable.Reader<TermsGroupConfig> instanceReader() {

@Override
protected TermsGroupConfig createTestInstance() {
return ConfigTestHelpers.getTerms().build();
return randomTermsGroupConfig(random());
}

public void testValidateNoMapping() throws IOException {
public void testValidateNoMapping() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [keyword/text] field with name " +
"[my_field] in any of the indices matching the index pattern."));
}

public void testValidateNomatchingField() throws IOException {

public void testValidateNomatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("some_other_field", Collections.singletonMap("keyword", fieldCaps));

TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find a [numeric] or [keyword/text] field with name " +
"[my_field] in any of the indices matching the index pattern."));
}

public void testValidateFieldWrongType() throws IOException {

public void testValidateFieldWrongType() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("my_field", Collections.singletonMap("geo_point", fieldCaps));

TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " +
"[keyword/text] type, but found [geo_point] for field [my_field]"));
}

public void testValidateFieldMatchingNotAggregatable() throws IOException {


public void testValidateFieldMatchingNotAggregatable() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

@@ -101,14 +87,12 @@ public void testValidateFieldMatchingNotAggregatable() {
when(fieldCaps.isAggregatable()).thenReturn(false);
responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps));

TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
}

public void testValidateMatchingField() throws IOException {
public void testValidateMatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
String type = getRandomType();
@@ -118,9 +102,7 @@ public void testValidateMatchingField() {
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap(type, fieldCaps));

TermsGroupConfig config = new TermsGroupConfig.Builder()
.setFields(Collections.singletonList("my_field"))
.build();
TermsGroupConfig config = new TermsGroupConfig("my_field");
config.validateMappings(responseMap, e);
if (e.validationErrors().size() != 0) {
fail(e.getMessage());