Skip to content

Commit

Permalink
Make column names case-insensitive
Browse files Browse the repository at this point in the history
  • Loading branch information
EmmyMiao87 committed Aug 8, 2019
1 parent 3807a27 commit c990f45
Show file tree
Hide file tree
Showing 5 changed files with 23 additions and 25 deletions.
29 changes: 13 additions & 16 deletions fe/src/main/java/org/apache/doris/analysis/DataDescription.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,14 +34,14 @@
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
// used to describe data info which is needed to import.
//
// data_desc:
Expand Down Expand Up @@ -76,15 +76,15 @@ public class DataDescription {
private TNetworkAddress beAddr;
private String lineDelimiter;

// This param is used for non-streaming
// This param is used to check the column expr. It is also used for non-streaming load.
private Map<String, Pair<String, List<String>>> columnToFunction;
/**
* Merged from columns and columnMappingList
* ImportColumnDesc: column name to expr or null
**/
private List<ImportColumnDesc> parsedColumnExprList;

private boolean isPullLoad = false;
private boolean isHadoopLoad = false;

public DataDescription(String tableName,
List<String> partitionNames,
Expand Down Expand Up @@ -178,12 +178,12 @@ public List<ImportColumnDesc> getParsedColumnExprList() {
return parsedColumnExprList;
}

public void setIsPullLoad(boolean isPullLoad) {
this.isPullLoad = isPullLoad;
public void setIsHadoopLoad(boolean isHadoopLoad) {
this.isHadoopLoad = isHadoopLoad;
}

public boolean isPullLoad() {
return isPullLoad;
public boolean isHadoopLoad() {
return isHadoopLoad;
}

/**
Expand All @@ -198,7 +198,7 @@ private void analyzeColumns() throws AnalysisException {
}
// merge columns exprs from columns and columnMappingList
// used to check duplicated column name
Set<String> columnNames = Sets.newHashSet();
Set<String> columnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
parsedColumnExprList = Lists.newArrayList();
// Step1: analyze columns
for (String columnName : columns) {
Expand Down Expand Up @@ -239,7 +239,7 @@ private void analyzeColumns() throws AnalysisException {
}
// hadoop load only supports the FunctionCallExpr
Expr child1 = predicate.getChild(1);
if (!isPullLoad && !(child1 instanceof FunctionCallExpr)) {
if (isHadoopLoad && !(child1 instanceof FunctionCallExpr)) {
throw new AnalysisException("Hadoop load only supports the designated function. "
+ "The error mapping function is:" + child1.toSql());
}
Expand Down Expand Up @@ -270,11 +270,10 @@ private void analyzeColumnToFunction(String columnName, Expr child1) throws Anal
} else if (paramExpr instanceof NullLiteral) {
args.add(null);
} else {
if (isPullLoad) {
continue;
} else {
if (isHadoopLoad) {
throw new AnalysisException("Mapping function args error, arg: " + paramExpr.toSql());
}
continue;
}
}

Expand All @@ -284,7 +283,7 @@ private void analyzeColumnToFunction(String columnName, Expr child1) throws Anal

public static void validateMappingFunction(String functionName, List<String> args,
Map<String, String> columnNameMap,
Column mappingColumn, boolean isPullLoad) throws AnalysisException {
Column mappingColumn, boolean isHadoopLoad) throws AnalysisException {
if (functionName.equalsIgnoreCase("alignment_timestamp")) {
validateAlignmentTimestamp(args, columnNameMap);
} else if (functionName.equalsIgnoreCase("strftime")) {
Expand All @@ -302,9 +301,7 @@ public static void validateMappingFunction(String functionName, List<String> arg
} else if (functionName.equalsIgnoreCase("now")) {
validateNowFunction(mappingColumn);
} else {
if (isPullLoad) {
return;
} else {
if (isHadoopLoad) {
throw new AnalysisException("Unknown function: " + functionName);
}
}
Expand Down
13 changes: 7 additions & 6 deletions fe/src/main/java/org/apache/doris/analysis/LoadStmt.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,18 @@

package org.apache.doris.analysis;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.UserException;
import org.apache.doris.common.util.PrintableMap;
import org.apache.doris.load.Load;
import org.apache.doris.qe.ConnectContext;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;

import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
Expand Down Expand Up @@ -214,8 +215,8 @@ public void analyze(Analyzer analyzer) throws UserException {
throw new AnalysisException("No data file in load statement.");
}
for (DataDescription dataDescription : dataDescriptions) {
if (brokerDesc != null) {
dataDescription.setIsPullLoad(true);
if (brokerDesc == null) {
dataDescription.setIsHadoopLoad(true);
}
dataDescription.analyze(label.getDbName());
}
Expand Down
2 changes: 1 addition & 1 deletion fe/src/main/java/org/apache/doris/load/Load.java
Original file line number Diff line number Diff line change
Expand Up @@ -730,7 +730,7 @@ public static void checkAndCreateSource(Database db, DataDescription dataDescrip
Pair<String, List<String>> function = entry.getValue();
try {
DataDescription.validateMappingFunction(function.first, function.second, columnNameMap,
mappingColumn, dataDescription.isPullLoad());
mappingColumn, dataDescription.isHadoopLoad());
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,6 @@ public void analyze() {
try {
stmt = (LoadStmt) parser.parse().value;
for (DataDescription dataDescription : stmt.getDataDescriptions()) {
dataDescription.setIsPullLoad(true);
dataDescription.analyzeWithoutCheckPriv();
}
Database db = Catalog.getCurrentCatalog().getDb(dbId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,8 @@ private void initColumns(ParamCreateContext context) throws UserException {
/**
* This method is used to transform hadoop function.
* The hadoop function includes: replace_value, strftime, time_format, alignment_timestamp, default_value, now.
* The method is used to rewrite those function with real function name and param.
 * It rewrites those functions with the real function name and params.
 * For any other function, the expr simply passes through this method and the original expr is returned.
* @param columnName
* @param originExpr
* @return
Expand Down

0 comments on commit c990f45

Please sign in to comment.