Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/apache/spark into enablin…
Browse files Browse the repository at this point in the history
…g-tests
  • Loading branch information
itholic committed Aug 9, 2023
2 parents 54ce01b + c73660c commit ed93658
Show file tree
Hide file tree
Showing 144 changed files with 7,886 additions and 1,377 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/build_and_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -657,7 +657,22 @@ jobs:
- name: Spark connect jvm client mima check
if: inputs.branch != 'branch-3.3'
run: ./dev/connect-jvm-client-mima-check
- name: Install Python linter dependencies for branch-3.3
if: inputs.branch == 'branch-3.3'
run: |
# SPARK-44554: Copy from https://github.com/apache/spark/blob/073d0b60d31bf68ebacdc005f59b928a5902670f/.github/workflows/build_and_test.yml#L501-L508
# Should delete this section after SPARK 3.3 EOL.
python3.9 -m pip install 'flake8==3.9.0' pydata_sphinx_theme 'mypy==0.920' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' numpydoc 'jinja2<3.0.0' 'black==21.12b0'
python3.9 -m pip install 'pandas-stubs==1.2.0.53'
- name: Install Python linter dependencies for branch-3.4
if: inputs.branch == 'branch-3.4'
run: |
# SPARK-44554: Copy from https://github.com/apache/spark/blob/a05c27e85829fe742c1828507a1fd180cdc84b54/.github/workflows/build_and_test.yml#L571-L578
# Should delete this section after SPARK 3.4 EOL.
python3.9 -m pip install 'flake8==3.9.0' pydata_sphinx_theme 'mypy==0.920' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' numpydoc 'jinja2<3.0.0' 'black==22.6.0'
python3.9 -m pip install 'pandas-stubs==1.2.0.53' ipython 'grpcio==1.48.1' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0'
- name: Install Python linter dependencies
if: inputs.branch != 'branch-3.3' && inputs.branch != 'branch-3.4'
run: |
# TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes.
# See also https://github.com/sphinx-doc/sphinx/issues/7551.
Expand All @@ -668,6 +683,7 @@ jobs:
- name: Python linter
run: PYTHON_EXECUTABLE=python3.9 ./dev/lint-python
- name: Install dependencies for Python code generation check
if: inputs.branch != 'branch-3.3' && inputs.branch != 'branch-3.4'
run: |
# See more in "Installation" https://docs.buf.build/installation#tarball
curl -LO https://github.com/bufbuild/buf/releases/download/v1.24.0/buf-Linux-x86_64.tar.gz
Expand All @@ -676,6 +692,7 @@ jobs:
rm buf-Linux-x86_64.tar.gz
python3.9 -m pip install 'protobuf==3.20.3' 'mypy-protobuf==3.3.0'
- name: Python code generation check
if: inputs.branch != 'branch-3.3' && inputs.branch != 'branch-3.4'
run: if test -f ./dev/connect-check-protos.py; then PATH=$PATH:$HOME/buf/bin PYTHON_EXECUTABLE=python3.9 ./dev/connect-check-protos.py; fi
- name: Install JavaScript linter dependencies
run: |
Expand Down
146 changes: 98 additions & 48 deletions common/utils/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,12 @@
],
"sqlState" : "22003"
},
"ASSIGNMENT_ARITY_MISMATCH" : {
"message" : [
"The number of columns or variables assigned or aliased: <numTarget> does not match the number of source expressions: <numExpr>."
],
"sqlState" : "42802"
},
"AS_OF_JOIN" : {
"message" : [
"Invalid as-of join."
Expand Down Expand Up @@ -332,7 +338,7 @@
},
"CAST_OVERFLOW_IN_TABLE_INSERT" : {
"message" : [
"Fail to insert a value of <sourceType> type into the <targetType> type column <columnName> due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead."
"Fail to assign a value of <sourceType> type to the <targetType> type column or variable <columnName> due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead."
],
"sqlState" : "22003"
},
Expand Down Expand Up @@ -764,6 +770,13 @@
],
"sqlState" : "42704"
},
"DEFAULT_PLACEMENT_INVALID" : {
"message" : [
"A DEFAULT keyword in a MERGE, INSERT, UPDATE, or SET VARIABLE command could not be directly assigned to a target column because it was part of an expression.",
"For example: `UPDATE T SET c1 = DEFAULT` is allowed, but `UPDATE T SET c1 = DEFAULT + 1` is not allowed."
],
"sqlState" : "42608"
},
"DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : {
"message" : [
"Distinct window functions are not supported: <windowExpr>."
Expand Down Expand Up @@ -791,6 +804,12 @@
"The metric name is not unique: <metricName>. The same name cannot be used for metrics with different results. However multiple instances of metrics with the same result and name are allowed (e.g. self-joins)."
]
},
"DUPLICATE_ASSIGNMENTS" : {
"message" : [
"The columns or variables <nameList> appear more than once as assignment targets."
],
"sqlState" : "42701"
},
"DUPLICATE_CLAUSES" : {
"message" : [
"Found duplicate clauses: <clauseName>. Please, remove one of them."
Expand Down Expand Up @@ -1227,6 +1246,44 @@
],
"sqlState" : "XX000"
},
"INTERNAL_ERROR_METADATA_CATALOG" : {
"message" : [
"An object in the metadata catalog has been corrupted:"
],
"subClass" : {
"SQL_CONFIG" : {
"message" : [
"Corrupted view SQL configs in catalog."
]
},
"TABLE_NAME_CONTEXT" : {
"message" : [
"Corrupted table name context in catalog: <numParts> parts expected, but part <index> is missing."
]
},
"TEMP_FUNCTION_REFERENCE" : {
"message" : [
"Corrupted view referred temp function names in catalog."
]
},
"TEMP_VARIABLE_REFERENCE" : {
"message" : [
"Corrupted view referred temp variable names in catalog."
]
},
"TEMP_VIEW_REFERENCE" : {
"message" : [
"Corrupted view referred temp view names in catalog."
]
},
"VIEW_QUERY_COLUMN_ARITY" : {
"message" : [
"Corrupted view query output column names in catalog: <numCols> parts expected, but part <index> is missing."
]
}
},
"sqlState" : "XX000"
},
"INTERNAL_ERROR_NETWORK" : {
"message" : [
"<message>"
Expand Down Expand Up @@ -1344,7 +1401,7 @@
},
"INVALID_DEFAULT_VALUE" : {
"message" : [
"Failed to execute <statement> command because the destination table column <colName> has a DEFAULT value <defaultValue>,"
"Failed to execute <statement> command because the destination column or variable <colName> has a DEFAULT value <defaultValue>,"
],
"subClass" : {
"DATA_TYPE" : {
Expand Down Expand Up @@ -1912,6 +1969,12 @@
"message" : [
"Unsupported function name <funcName>."
]
},
"VARIABLE_TYPE_OR_DEFAULT_REQUIRED" : {
"message" : [
"The definition of a SQL variable requires either a datatype or a DEFAULT clause.",
"For example, use `DECLARE name STRING` or `DECLARE name = 'SQL'` instead of `DECLARE name`."
]
}
},
"sqlState" : "42000"
Expand Down Expand Up @@ -2497,6 +2560,12 @@
],
"sqlState" : "42883"
},
"ROW_SUBQUERY_TOO_MANY_ROWS" : {
"message" : [
"More than one row returned by a subquery used as a row."
],
"sqlState" : "21000"
},
"RULE_ID_NOT_FOUND" : {
"message" : [
"Not found an id for the rule name \"<ruleName>\". Please modify RuleIdCollection.scala if you are adding a new rule."
Expand Down Expand Up @@ -2737,7 +2806,7 @@
},
"UNRESOLVED_COLUMN" : {
"message" : [
"A column or function parameter with name <objectName> cannot be resolved."
"A column, variable, or function parameter with name <objectName> cannot be resolved."
],
"subClass" : {
"WITHOUT_SUGGESTION" : {
Expand Down Expand Up @@ -2801,6 +2870,12 @@
],
"sqlState" : "42703"
},
"UNRESOLVED_VARIABLE" : {
"message" : [
"Cannot resolve variable <variableName> on search path <searchPath>."
],
"sqlState" : "42883"
},
"UNSET_NONEXISTENT_PROPERTIES" : {
"message" : [
"Attempted to unset non-existent properties [<properties>] in table <table>."
Expand Down Expand Up @@ -3097,6 +3172,11 @@
"<property> is a reserved table property, <msg>."
]
},
"SET_VARIABLE_USING_SET" : {
"message" : [
"<variableName> is a VARIABLE and cannot be updated using the SET statement. Use SET VARIABLE <variableName> = ... instead."
]
},
"TABLE_OPERATION" : {
"message" : [
"Table <tableName> does not support <operation>. Please check the current catalog and namespace to make sure the qualified table name is expected, and also check the catalog implementation which is configured by \"spark.sql.catalog\"."
Expand Down Expand Up @@ -3323,6 +3403,21 @@
"3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use this API with caution."
]
},
"VARIABLE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the variable <variableName> because it already exists.",
"Choose a different name, or drop or replace the existing variable."
],
"sqlState" : "42723"
},
"VARIABLE_NOT_FOUND" : {
"message" : [
"The variable <variableName> cannot be found. Verify the spelling and correctness of the schema and catalog.",
"If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.",
"To tolerate the error on drop use DROP VARIABLE IF EXISTS."
],
"sqlState" : "42883"
},
"VIEW_ALREADY_EXISTS" : {
"message" : [
"Cannot create view <relationName> because it already exists.",
Expand Down Expand Up @@ -3682,11 +3777,6 @@
"FILTER expression contains window function. It cannot be used in an aggregate function."
]
},
"_LEGACY_ERROR_TEMP_1028" : {
"message" : [
"Number of column aliases does not match number of columns. Number of column aliases: <columnSize>; number of columns: <outputSize>."
]
},
"_LEGACY_ERROR_TEMP_1030" : {
"message" : [
"Window aggregate function with filter predicate is not supported yet."
Expand Down Expand Up @@ -3878,31 +3968,6 @@
"Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (`<bucketingMaxBuckets>`). Got `<numBuckets>`."
]
},
"_LEGACY_ERROR_TEMP_1084" : {
"message" : [
"Corrupted table name context in catalog: <numParts> parts expected, but part <index> is missing."
]
},
"_LEGACY_ERROR_TEMP_1085" : {
"message" : [
"Corrupted view SQL configs in catalog."
]
},
"_LEGACY_ERROR_TEMP_1086" : {
"message" : [
"Corrupted view query output column names in catalog: <numCols> parts expected, but part <index> is missing."
]
},
"_LEGACY_ERROR_TEMP_1087" : {
"message" : [
"Corrupted view referred temp view names in catalog."
]
},
"_LEGACY_ERROR_TEMP_1088" : {
"message" : [
"Corrupted view referred temp functions names in catalog."
]
},
"_LEGACY_ERROR_TEMP_1089" : {
"message" : [
"Column statistics deserialization is not supported for column <name> of data type: <dataType>."
Expand Down Expand Up @@ -4785,21 +4850,6 @@
"Sinks cannot request distribution and ordering in continuous execution mode."
]
},
"_LEGACY_ERROR_TEMP_1339" : {
"message" : [
"Failed to execute INSERT INTO command because the VALUES list contains a DEFAULT column reference as part of another expression; this is not allowed."
]
},
"_LEGACY_ERROR_TEMP_1340" : {
"message" : [
"Failed to execute UPDATE command because the SET list contains a DEFAULT column reference as part of another expression; this is not allowed."
]
},
"_LEGACY_ERROR_TEMP_1343" : {
"message" : [
"Failed to execute MERGE INTO command because one of its INSERT or UPDATE assignments contains a DEFAULT column reference as part of another expression; this is not allowed."
]
},
"_LEGACY_ERROR_TEMP_1344" : {
"message" : [
"Invalid DEFAULT value for column <fieldName>: <defaultValue> fails to parse as a valid literal value."
Expand Down

This file was deleted.

Loading

0 comments on commit ed93658

Please sign in to comment.