Skip to content

Commit

Permalink
Code cleanup
Browse files Browse the repository at this point in the history
Signed-off-by: coduz <alberto.codutti@eurotech.com>
  • Loading branch information
Coduz committed Oct 9, 2020
1 parent 106c012 commit 14e15bb
Show file tree
Hide file tree
Showing 30 changed files with 178 additions and 250 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
package org.eclipse.kapua.service.elasticsearch.client;

import com.fasterxml.jackson.databind.JsonNode;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.service.elasticsearch.client.exception.DatamodelMappingException;
import org.eclipse.kapua.service.elasticsearch.client.exception.QueryMappingException;

/**
Expand All @@ -37,11 +35,10 @@ public interface QueryConverter {
*
     * @param query The query to convert.
* @return The converted query.
     * @throws QueryMappingException if query mappings are not correct.
* @throws DatamodelMappingException if model mappings are not correct.
     * @throws QueryMappingException if query mappings are not correct.
* @since 1.0.0
*/
JsonNode convertQuery(Object query) throws QueryMappingException, DatamodelMappingException, KapuaException;
JsonNode convertQuery(Object query) throws QueryMappingException;

/**
* Gets the query fetch style
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,21 @@ public class QueryMappingException extends ClientException {
/**
* Constructor.
*
* @param reason The reason of this {@link QueryMappingException}.
* @since 1.3.0
*/
public QueryMappingException() {
super(ClientErrorCodes.QUERY_MAPPING_EXCEPTION);
public QueryMappingException(String reason) {
this(null, reason);
}

/**
* Constructor.
*
* @param cause The root cause of the {@link QueryMappingException}.
* @param reason The reason of this {@link QueryMappingException}.
* @since 1.3.0
*/
public QueryMappingException(Throwable cause, String reason) {
super(ClientErrorCodes.QUERY_MAPPING_EXCEPTION, cause, reason);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.http.ParseException;
import org.apache.http.util.EntityUtils;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.commons.metric.MetricServiceFactory;
import org.eclipse.kapua.commons.metric.MetricsService;
import org.eclipse.kapua.commons.util.RandomUtils;
Expand Down Expand Up @@ -265,13 +264,7 @@ public <T> T find(TypeDescriptor typeDescriptor, Object query, Class<T> clazz) t

@Override
public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Class<T> clazz) throws ClientException {
JsonNode queryJsonNode = null;
try {
queryJsonNode = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
Object queryFetchStyle = getModelConverter().getFetchStyle(query);
JsonNode queryJsonNode = getModelConverter().convertQuery(query);
LOG.debug(QUERY_CONVERTED_QUERY, queryJsonNode);

String json = writeRequestFromJsonNode(queryJsonNode);
Expand Down Expand Up @@ -303,6 +296,7 @@ public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Clas
}

ResultList<T> resultList = new ResultList<>(totalCount);
Object queryFetchStyle = getModelConverter().getFetchStyle(query);
if (resultsNode != null && !resultsNode.isEmpty()) {
for (JsonNode result : resultsNode) {
Map<String, Object> object = objectMapper.convertValue(result.get(SchemaKeys.KEY_SOURCE), Map.class);
Expand All @@ -323,12 +317,8 @@ public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Clas

@Override
public long count(TypeDescriptor typeDescriptor, Object query) throws ClientException {
JsonNode queryJsonNode = null;
try {
queryJsonNode = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
JsonNode queryJsonNode = getModelConverter().convertQuery(query);

LOG.debug(COUNT_CONVERTED_QUERY, queryJsonNode);

String json = writeRequestFromJsonNode(queryJsonNode);
Expand Down Expand Up @@ -381,12 +371,8 @@ public void delete(TypeDescriptor typeDescriptor, String id) throws ClientExcept

@Override
public void deleteByQuery(TypeDescriptor typeDescriptor, Object query) throws ClientException {
JsonNode queryJsonNode = null;
try {
queryJsonNode = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
JsonNode queryJsonNode = getModelConverter().convertQuery(query);

LOG.debug(QUERY_CONVERTED_QUERY, queryJsonNode);

String json = writeRequestFromJsonNode(queryJsonNode);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.MoreObjects;
import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.service.elasticsearch.client.AbstractElasticsearchClient;
import org.eclipse.kapua.service.elasticsearch.client.ModelContext;
import org.eclipse.kapua.service.elasticsearch.client.QueryConverter;
Expand Down Expand Up @@ -207,13 +206,7 @@ public <T> T find(TypeDescriptor typeDescriptor, Object query, Class<T> clazz) t

@Override
public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Class<T> clazz) throws ClientException {
JsonNode queryMap = null;
try {
queryMap = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
Object queryFetchStyle = getModelConverter().getFetchStyle(query);
JsonNode queryMap = getModelConverter().convertQuery(query);
LOG.debug("Query - converted query: '{}'", queryMap);

ObjectNode fetchSourceFields = (ObjectNode) queryMap.path(SchemaKeys.KEY_SOURCE);
Expand Down Expand Up @@ -243,6 +236,7 @@ public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Clas
}

ResultList<T> result = new ResultList<>(totalCount);
Object queryFetchStyle = getModelConverter().getFetchStyle(query);
if (searchHits != null) {
for (SearchHit searchHit : searchHits) {
Map<String, Object> object = searchHit.getSource();
Expand All @@ -258,13 +252,8 @@ public <T> ResultList<T> query(TypeDescriptor typeDescriptor, Object query, Clas

@Override
public long count(TypeDescriptor typeDescriptor, Object query) throws ClientException {
// TODO check for fetch none
JsonNode queryMap = null;
try {
queryMap = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
JsonNode queryMap = getModelConverter().convertQuery(query);

SearchRequestBuilder searchReqBuilder = getClient().prepareSearch(typeDescriptor.getIndex());
SearchHits searchHits = null;
try {
Expand Down Expand Up @@ -303,12 +292,8 @@ public void delete(TypeDescriptor typeDescriptor, String id) throws ClientExcept

@Override
public void deleteByQuery(TypeDescriptor typeDescriptor, Object query) throws ClientException {
JsonNode queryMap = null;
try {
queryMap = getModelConverter().convertQuery(query);
} catch (KapuaException e) {
e.printStackTrace();
}
JsonNode queryMap = getModelConverter().convertQuery(query);

TimeValue queryTimeout = getQueryTimeout();
TimeValue scrollTimeout = getScrollTimeout();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@

/**
* {@link StorableQuery} definition.
* <p>
*
* @since 1.0.0
*/
Expand All @@ -46,7 +45,7 @@ public interface StorableQuery {
* @param fetchAttribute The fetch attribute to add to the list.
* @since 1.0.0
*/
public void addFetchAttributes(String fetchAttribute);
void addFetchAttributes(String fetchAttribute);

/**
* Sets the fetch attribute names list.<br>
Expand All @@ -55,7 +54,7 @@ public interface StorableQuery {
* @param fetchAttributeNames The fetch attribute names list.
* @since 1.0.0
*/
public void setFetchAttributes(List<String> fetchAttributeNames);
void setFetchAttributes(List<String> fetchAttributeNames);

/**
* Gets the scope {@link KapuaId}.
Expand Down Expand Up @@ -187,4 +186,21 @@ public interface StorableQuery {
*/
void setSortFields(List<SortField> sortFields);

/**
* Gets the included {@link StorableField}s according to the {@link StorableFetchStyle}.
*
* @param fetchStyle The {@link StorableFetchStyle}.
* @return The included {@link StorableField}s according to the {@link StorableFetchStyle}.
* @since 1.0.0
*/
String[] getIncludes(StorableFetchStyle fetchStyle);

/**
* Gets the excluded {@link StorableField}s according to the {@link StorableFetchStyle}.
*
* @param fetchStyle The {@link StorableFetchStyle}.
* @return The excluded {@link StorableField}s according to the {@link StorableFetchStyle}.
* @since 1.0.0
*/
String[] getExcludes(StorableFetchStyle fetchStyle);
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,24 +63,6 @@ public AbstractStorableQuery(KapuaId scopeId) {
setScopeId(scopeId);
}

/**
* Gets the included {@link StorableField}s according to the {@link StorableFetchStyle}.
*
* @param fetchStyle The {@link StorableFetchStyle}.
* @return The included {@link StorableField}s according to the {@link StorableFetchStyle}.
* @since 1.0.0
*/
public abstract String[] getIncludes(StorableFetchStyle fetchStyle);

/**
* Gets the excluded {@link StorableField}s according to the {@link StorableFetchStyle}.
*
* @param fetchStyle The {@link StorableFetchStyle}.
* @return The excluded {@link StorableField}s according to the {@link StorableFetchStyle}.
* @since 1.0.0
*/
public abstract String[] getExcludes(StorableFetchStyle fetchStyle);

/**
* Gets the {@link StorableField}s.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
*******************************************************************************/
package org.eclipse.kapua.service.datastore.internal;

import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.KapuaIllegalArgumentException;
import org.eclipse.kapua.commons.util.ArgumentValidator;
import org.eclipse.kapua.locator.KapuaLocator;
Expand Down Expand Up @@ -141,11 +140,8 @@ public void delete(KapuaId scopeId, StorableId id) throws KapuaIllegalArgumentEx
String indexName = SchemaUtil.getKapuaIndexName(scopeId);
ChannelInfo channelInfo = find(scopeId, id);
if (channelInfo != null) {
try {
mediator.onBeforeChannelInfoDelete(channelInfo);
} catch (KapuaException e) {
e.printStackTrace();
}
mediator.onBeforeChannelInfoDelete(channelInfo);

TypeDescriptor typeDescriptor = new TypeDescriptor(indexName, ChannelInfoSchema.CHANNEL_TYPE_NAME);
getElasticsearchClient().delete(typeDescriptor, id.toString());
}
Expand Down Expand Up @@ -244,13 +240,9 @@ void delete(ChannelInfoQuery query) throws KapuaIllegalArgumentException, Config

String indexName = SchemaUtil.getKapuaIndexName(query.getScopeId());
ChannelInfoListResult channels = query(query);
// TODO Improve performances

for (ChannelInfo channelInfo : channels.getItems()) {
try {
mediator.onBeforeChannelInfoDelete(channelInfo);
} catch (KapuaException e) {
e.printStackTrace();
}
mediator.onBeforeChannelInfoDelete(channelInfo);
}

TypeDescriptor typeDescriptor = new TypeDescriptor(indexName, ChannelInfoSchema.CHANNEL_TYPE_NAME);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
*******************************************************************************/
package org.eclipse.kapua.service.datastore.internal;

import org.eclipse.kapua.KapuaException;
import org.eclipse.kapua.KapuaIllegalArgumentException;
import org.eclipse.kapua.commons.util.ArgumentValidator;
import org.eclipse.kapua.locator.KapuaLocator;
Expand All @@ -34,6 +33,7 @@
import org.eclipse.kapua.service.elasticsearch.client.model.TypeDescriptor;
import org.eclipse.kapua.service.elasticsearch.client.model.UpdateRequest;
import org.eclipse.kapua.service.elasticsearch.client.model.UpdateResponse;
import org.eclipse.kapua.service.storable.exception.MappingException;
import org.eclipse.kapua.service.storable.model.id.StorableId;
import org.eclipse.kapua.service.storable.model.id.StorableIdFactory;
import org.eclipse.kapua.service.storable.model.query.predicate.IdsPredicate;
Expand Down Expand Up @@ -82,7 +82,7 @@ public MetricInfoRegistryFacade(ConfigurationProvider configProvider, MetricInfo
* @throws ConfigurationException
* @throws ClientException
*/
public StorableId upstore(MetricInfo metricInfo) throws KapuaIllegalArgumentException, ConfigurationException, ClientException {
public StorableId upstore(MetricInfo metricInfo) throws KapuaIllegalArgumentException, ConfigurationException, ClientException, MappingException {
ArgumentValidator.notNull(metricInfo, "metricInfo");
ArgumentValidator.notNull(metricInfo.getScopeId(), "metricInfo.scopeId");
ArgumentValidator.notNull(metricInfo.getFirstMessageId(), "metricInfoCreator.firstPublishedMessageId");
Expand All @@ -97,12 +97,8 @@ public StorableId upstore(MetricInfo metricInfo) throws KapuaIllegalArgumentExce
// fix #REPLACE_ISSUE_NUMBER
MetricInfo storedField = find(metricInfo.getScopeId(), storableId);
if (storedField == null) {
Metadata metadata = null;
try {
metadata = mediator.getMetadata(metricInfo.getScopeId(), metricInfo.getFirstMessageOn().getTime());
} catch (KapuaException e) {
e.printStackTrace();
}
Metadata metadata = mediator.getMetadata(metricInfo.getScopeId(), metricInfo.getFirstMessageOn().getTime());

String kapuaIndexName = metadata.getRegistryIndexName();

UpdateRequest request = new UpdateRequest(metricInfo.getId().toString(), new TypeDescriptor(metadata.getRegistryIndexName(), MetricInfoSchema.METRIC_TYPE_NAME), metricInfo);
Expand All @@ -128,7 +124,8 @@ public StorableId upstore(MetricInfo metricInfo) throws KapuaIllegalArgumentExce
public BulkUpdateResponse upstore(MetricInfo[] metricInfos)
throws KapuaIllegalArgumentException,
ConfigurationException,
ClientException {
ClientException,
MappingException {
ArgumentValidator.notNull(metricInfos, "metricInfos");

BulkUpdateRequest bulkRequest = new BulkUpdateRequest();
Expand All @@ -145,12 +142,8 @@ public BulkUpdateResponse upstore(MetricInfo[] metricInfos)
continue;
}
performUpdate = true;
Metadata metadata = null;
try {
metadata = mediator.getMetadata(metricInfo.getScopeId(), metricInfo.getFirstMessageOn().getTime());
} catch (KapuaException e) {
e.printStackTrace();
}
Metadata metadata = mediator.getMetadata(metricInfo.getScopeId(), metricInfo.getFirstMessageOn().getTime());

bulkRequest.add(
new UpdateRequest(
metricInfo.getId().toString(),
Expand Down

This file was deleted.

Loading

0 comments on commit 14e15bb

Please sign in to comment.