
Merge branch 'main' into alloc-split
mch2 committed Sep 4, 2024
2 parents 1efafc0 + 4f57d6a commit 05d8e19
Showing 222 changed files with 5,526 additions and 1,536 deletions.
@@ -238,7 +238,7 @@ public void testSnapshotsStatus() {
boolean ignoreUnavailable = randomBoolean();
String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";

SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots, indices);
SnapshotsStatusRequest snapshotsStatusRequest = (new SnapshotsStatusRequest(repository, snapshots)).indices(indices);
RequestConvertersTests.setRandomClusterManagerTimeout(snapshotsStatusRequest, expectedParams);
snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
@@ -49,6 +49,7 @@
import org.opensearch.common.xcontent.support.XContentMapValues;
import org.opensearch.core.xcontent.XContentParser;
import org.opensearch.core.xcontent.XContentParser.Token;
import org.opensearch.index.compositeindex.datacube.DimensionType;
import org.opensearch.index.fielddata.FieldData;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.fielddata.IndexNumericFieldData;
@@ -71,10 +72,12 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;

/** A {@link FieldMapper} for scaled floats. Values are internally multiplied
* by a scaling factor and rounded to the closest long. */
* by a scaling factor and rounded to the closest long.
*/
public class ScaledFloatFieldMapper extends ParametrizedFieldMapper {

public static final String CONTENT_TYPE = "scaled_float";
@@ -162,11 +165,21 @@ public ScaledFloatFieldMapper build(BuilderContext context) {
);
return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}

@Override
public Optional<DimensionType> getSupportedDataCubeDimensionType() {
return Optional.of(DimensionType.NUMERIC);
}

@Override
public boolean isDataCubeMetricSupported() {
return true;
}
}

public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.getSettings()));

public static final class ScaledFloatFieldType extends SimpleMappedFieldType implements NumericPointEncoder {
public static final class ScaledFloatFieldType extends SimpleMappedFieldType implements NumericPointEncoder, FieldValueConverter {

private final double scalingFactor;
private final Double nullValue;
@@ -340,6 +353,12 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) {
private double scale(Object input) {
return new BigDecimal(Double.toString(parse(input))).multiply(BigDecimal.valueOf(scalingFactor)).doubleValue();
}

@Override
public double toDoubleValue(long value) {
double inverseScalingFactor = 1d / scalingFactor;
return value * inverseScalingFactor;
}
}

private final Explicit<Boolean> ignoreMalformed;
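Editor's note: the scale(Object) and toDoubleValue(long) methods shown above are the two halves of the scaled_float encoding — one multiplies the incoming double by the scaling factor before it is rounded to the closest long, the other multiplies the stored long by the inverse factor to recover an approximate double. A minimal standalone sketch of that round trip (hypothetical class name, assuming a scaling factor of 100; not part of this commit) might look like:

import java.math.BigDecimal;

public class ScaledFloatRoundTrip {
    public static void main(String[] args) {
        double scalingFactor = 100.0;
        double input = 12.345;

        // Index side: multiply by the scaling factor via BigDecimal (mirroring scale())
        // and round to the closest long, as the class javadoc describes.
        long stored = Math.round(
            new BigDecimal(Double.toString(input)).multiply(BigDecimal.valueOf(scalingFactor)).doubleValue()
        );

        // Query/aggregation side: multiply by the inverse scaling factor (mirroring toDoubleValue(long)).
        double restored = stored * (1d / scalingFactor);

        System.out.println(stored);    // 1235
        System.out.println(restored);  // ~12.35 -- precision finer than the scaling factor is lost
    }
}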
Original file line number Diff line number Diff line change
Expand Up @@ -400,11 +400,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
final MappedFieldType ft = context.fieldMapper(field);
@@ -34,18 +34,24 @@

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.FeatureFlags;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.index.compositeindex.datacube.startree.StarTreeIndexSettings;
import org.opensearch.plugins.Plugin;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import static java.util.Collections.singletonList;
import static org.opensearch.common.util.FeatureFlags.STAR_TREE_INDEX;
import static org.hamcrest.Matchers.containsString;

public class ScaledFloatFieldMapperTests extends MapperTestCase {
@@ -91,24 +97,112 @@ public void testExistsQueryDocValuesDisabled() throws IOException {
assertParseMinimalWarnings();
}

public void testDefaults() throws Exception {
XContentBuilder mapping = fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0));
@BeforeClass
public static void createMapper() {
FeatureFlags.initializeFeatureFlags(Settings.builder().put(STAR_TREE_INDEX, "true").build());
}

@AfterClass
public static void clearMapper() {
FeatureFlags.initializeFeatureFlags(Settings.EMPTY);
}

public void testScaledFloatWithStarTree() throws Exception {

double scalingFactorField1 = randomDouble() * 100;
double scalingFactorField2 = randomDouble() * 100;
double scalingFactorField3 = randomDouble() * 100;

XContentBuilder mapping = getStarTreeMappingWithScaledFloat(scalingFactorField1, scalingFactorField2, scalingFactorField3);
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(mapping.toString(), mapper.mappingSource().toString());
assertTrue(mapping.toString().contains("startree"));

ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
IndexableField[] fields = doc.rootDoc().getFields("field");
long randomLongField1 = randomLong();
long randomLongField2 = randomLong();
long randomLongField3 = randomLong();
ParsedDocument doc = mapper.parse(
source(b -> b.field("field1", randomLongField1).field("field2", randomLongField2).field("field3", randomLongField3))
);
validateScaledFloatFields(doc, "field1", randomLongField1, scalingFactorField1);
validateScaledFloatFields(doc, "field2", randomLongField2, scalingFactorField2);
validateScaledFloatFields(doc, "field3", randomLongField3, scalingFactorField3);
}

@Override
protected Settings getIndexSettings() {
return Settings.builder()
.put(StarTreeIndexSettings.IS_COMPOSITE_INDEX_SETTING.getKey(), true)
.put(super.getIndexSettings())
.build();
}

private static void validateScaledFloatFields(ParsedDocument doc, String field, long value, double scalingFactor) {
IndexableField[] fields = doc.rootDoc().getFields(field);
assertEquals(2, fields.length);
IndexableField pointField = fields[0];
assertEquals(1, pointField.fieldType().pointDimensionCount());
assertFalse(pointField.fieldType().stored());
assertEquals(1230, pointField.numericValue().longValue());
assertEquals((long) (value * scalingFactor), pointField.numericValue().longValue());
IndexableField dvField = fields[1];
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
assertEquals(1230, dvField.numericValue().longValue());
assertEquals((long) (value * scalingFactor), dvField.numericValue().longValue());
assertFalse(dvField.fieldType().stored());
}

private XContentBuilder getStarTreeMappingWithScaledFloat(
double scalingFactorField1,
double scalingFactorField2,
double scalingFactorField3
) throws IOException {
return topMapping(b -> {
b.startObject("composite");
b.startObject("startree");
b.field("type", "star_tree");
b.startObject("config");
b.field("max_leaf_docs", 100);
b.startArray("ordered_dimensions");
b.startObject();
b.field("name", "field1");
b.endObject();
b.startObject();
b.field("name", "field2");
b.endObject();
b.endArray();
b.startArray("metrics");
b.startObject();
b.field("name", "field3");
b.startArray("stats");
b.value("sum");
b.value("value_count");
b.endArray();
b.endObject();
b.endArray();
b.endObject();
b.endObject();
b.endObject();
b.startObject("properties");
b.startObject("field1");
b.field("type", "scaled_float").field("scaling_factor", scalingFactorField1);
b.endObject();
b.startObject("field2");
b.field("type", "scaled_float").field("scaling_factor", scalingFactorField2);
b.endObject();
b.startObject("field3");
b.field("type", "scaled_float").field("scaling_factor", scalingFactorField3);
b.endObject();
b.endObject();
});
}

public void testDefaults() throws Exception {
XContentBuilder mapping = fieldMapping(b -> b.field("type", "scaled_float").field("scaling_factor", 10.0));
DocumentMapper mapper = createDocumentMapper(mapping);
assertEquals(mapping.toString(), mapper.mappingSource().toString());

ParsedDocument doc = mapper.parse(source(b -> b.field("field", 123)));
validateScaledFloatFields(doc, "field", 123, 10.0);
}

public void testMissingScalingFactor() {
Exception e = expectThrows(
MapperParsingException.class,
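Editor's note: the getStarTreeMappingWithScaledFloat builder in the test above corresponds roughly to the following mapping JSON (scaling factors shown with illustrative values — the test randomizes them — and topMapping nests this body under its usual outer wrapper):

{
  "composite": {
    "startree": {
      "type": "star_tree",
      "config": {
        "max_leaf_docs": 100,
        "ordered_dimensions": [
          { "name": "field1" },
          { "name": "field2" }
        ],
        "metrics": [
          { "name": "field3", "stats": ["sum", "value_count"] }
        ]
      }
    }
  },
  "properties": {
    "field1": { "type": "scaled_float", "scaling_factor": 10.0 },
    "field2": { "type": "scaled_float", "scaling_factor": 20.0 },
    "field3": { "type": "scaled_float", "scaling_factor": 30.0 }
  }
}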
@@ -264,11 +264,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static HasChildQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String childType = null;
@@ -233,11 +233,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static HasParentQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String parentType = null;
@@ -130,11 +130,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

public static ParentIdQueryBuilder fromXContent(XContentParser parser) throws IOException {
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String type = null;
@@ -356,11 +356,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep
builder.endObject();
}

@Override
public final String fieldName() {
return getDefaultFieldName();
}

private static final ConstructingObjectParser<PercolateQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
String field = (String) args[0];
BytesReference document = (BytesReference) args[1];
@@ -24,9 +24,12 @@
import org.opensearch.plugin.wlm.action.TransportCreateQueryGroupAction;
import org.opensearch.plugin.wlm.action.TransportDeleteQueryGroupAction;
import org.opensearch.plugin.wlm.action.TransportGetQueryGroupAction;
import org.opensearch.plugin.wlm.action.TransportUpdateQueryGroupAction;
import org.opensearch.plugin.wlm.action.UpdateQueryGroupAction;
import org.opensearch.plugin.wlm.rest.RestCreateQueryGroupAction;
import org.opensearch.plugin.wlm.rest.RestDeleteQueryGroupAction;
import org.opensearch.plugin.wlm.rest.RestGetQueryGroupAction;
import org.opensearch.plugin.wlm.rest.RestUpdateQueryGroupAction;
import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService;
import org.opensearch.plugins.ActionPlugin;
import org.opensearch.plugins.Plugin;
@@ -52,7 +55,8 @@ public WorkloadManagementPlugin() {}
return List.of(
new ActionPlugin.ActionHandler<>(CreateQueryGroupAction.INSTANCE, TransportCreateQueryGroupAction.class),
new ActionPlugin.ActionHandler<>(GetQueryGroupAction.INSTANCE, TransportGetQueryGroupAction.class),
new ActionPlugin.ActionHandler<>(DeleteQueryGroupAction.INSTANCE, TransportDeleteQueryGroupAction.class)
new ActionPlugin.ActionHandler<>(DeleteQueryGroupAction.INSTANCE, TransportDeleteQueryGroupAction.class),
new ActionPlugin.ActionHandler<>(UpdateQueryGroupAction.INSTANCE, TransportUpdateQueryGroupAction.class)
);
}

@@ -66,7 +70,12 @@ public List<RestHandler> getRestHandlers(
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster
) {
return List.of(new RestCreateQueryGroupAction(), new RestGetQueryGroupAction(), new RestDeleteQueryGroupAction());
return List.of(
new RestCreateQueryGroupAction(),
new RestGetQueryGroupAction(),
new RestDeleteQueryGroupAction(),
new RestUpdateQueryGroupAction()
);
}

@Override
@@ -40,15 +40,15 @@ public class CreateQueryGroupRequest extends ActionRequest {
* Constructor for CreateQueryGroupRequest
* @param queryGroup - A {@link QueryGroup} object
*/
public CreateQueryGroupRequest(QueryGroup queryGroup) {
CreateQueryGroupRequest(QueryGroup queryGroup) {
this.queryGroup = queryGroup;
}

/**
* Constructor for CreateQueryGroupRequest
* @param in - A {@link StreamInput} object
*/
public CreateQueryGroupRequest(StreamInput in) throws IOException {
CreateQueryGroupRequest(StreamInput in) throws IOException {
super(in);
queryGroup = new QueryGroup(in);
}
@@ -0,0 +1,51 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.plugin.wlm.action;

import org.opensearch.action.support.ActionFilters;
import org.opensearch.action.support.HandledTransportAction;
import org.opensearch.common.inject.Inject;
import org.opensearch.core.action.ActionListener;
import org.opensearch.plugin.wlm.service.QueryGroupPersistenceService;
import org.opensearch.tasks.Task;
import org.opensearch.transport.TransportService;

/**
* Transport action to update QueryGroup
*
* @opensearch.experimental
*/
public class TransportUpdateQueryGroupAction extends HandledTransportAction<UpdateQueryGroupRequest, UpdateQueryGroupResponse> {

private final QueryGroupPersistenceService queryGroupPersistenceService;

/**
* Constructor for TransportUpdateQueryGroupAction
*
* @param actionName - action name
* @param transportService - a {@link TransportService} object
* @param actionFilters - a {@link ActionFilters} object
* @param queryGroupPersistenceService - a {@link QueryGroupPersistenceService} object
*/
@Inject
public TransportUpdateQueryGroupAction(
String actionName,
TransportService transportService,
ActionFilters actionFilters,
QueryGroupPersistenceService queryGroupPersistenceService
) {
super(UpdateQueryGroupAction.NAME, transportService, actionFilters, UpdateQueryGroupRequest::new);
this.queryGroupPersistenceService = queryGroupPersistenceService;
}

@Override
protected void doExecute(Task task, UpdateQueryGroupRequest request, ActionListener<UpdateQueryGroupResponse> listener) {
queryGroupPersistenceService.updateInClusterStateMetadata(request, listener);
}
}