Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
198 changes: 108 additions & 90 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -1781,6 +1781,78 @@
],
"sqlState" : "42K03"
},
"DATASOURCE_BATCH_SCAN_NOT_SUPPORTED" : {
"message" : [
"<description>: Batch scan is not supported."
],
"sqlState" : "0A000"
},
"DATASOURCE_BATCH_WRITE_BUILD_NOT_SUPPORTED" : {
"message" : [
"<class> does not support batch write."
],
"sqlState" : "0A000"
},
"DATASOURCE_BATCH_WRITE_NOT_SUPPORTED" : {
"message" : [
"<description>: Batch write is not supported."
],
"sqlState" : "0A000"
},
"DATASOURCE_COLUMNAR_READER_NOT_SUPPORTED" : {
"message" : [
"Cannot create columnar reader."
],
"sqlState" : "0A000"
},
"DATASOURCE_CONTINUOUS_SCAN_NOT_SUPPORTED" : {
"message" : [
"<description>: Continuous scan is not supported."
],
"sqlState" : "0A000"
},
"DATASOURCE_DELTA_BATCH_WRITE_NOT_SUPPORTED" : {
"message" : [
"<description>: Delta batch write is not supported."
],
"sqlState" : "0A000"
},
"DATASOURCE_DELTA_WRITE_BUILD_NOT_IMPLEMENTED" : {
"message" : [
"<class> does not implement build."
],
"sqlState" : "0A000"
},
"DATASOURCE_METADATA_SCHEMA_NOT_IMPLEMENTED" : {
"message" : [
"<class> does not implement metadataSchema."
],
"sqlState" : "0A000"
},
"DATASOURCE_MICRO_BATCH_SCAN_NOT_SUPPORTED" : {
"message" : [
"<description>: Micro-batch scan is not supported."
],
"sqlState" : "0A000"
},
"DATASOURCE_ROW_ID_SCHEMA_NOT_IMPLEMENTED" : {
"message" : [
"<class> does not implement rowIdSchema."
],
"sqlState" : "0A000"
},
"DATASOURCE_STREAMING_WRITE_BUILD_NOT_SUPPORTED" : {
"message" : [
"<class> does not support streaming write."
],
"sqlState" : "0A000"
},
"DATASOURCE_STREAMING_WRITE_NOT_SUPPORTED" : {
"message" : [
"<description>: Streaming write is not supported."
],
"sqlState" : "0A000"
},
"DATETIME_FIELD_OUT_OF_BOUNDS" : {
"message" : [
"<rangeMessage>."
Expand Down Expand Up @@ -5537,6 +5609,12 @@
],
"sqlState" : "428FT"
},
"PARTITIONS_TRUNCATE_NOT_SUPPORTED" : {
"message" : [
"Partitions truncate is not supported."
],
"sqlState" : "0A000"
},
"PARTITION_BY_NOT_ALLOWED_WITH_INSERT_INTO" : {
"message" : [
"partitionBy() cannot be used with insertInto(). Partition columns for table <tableName> are determined by the table definition."
Expand All @@ -5549,12 +5627,24 @@
],
"sqlState" : "42000"
},
"PARTITION_RENAME_NOT_SUPPORTED" : {
"message" : [
"Partition renaming is not supported."
],
"sqlState" : "0A000"
},
"PARTITION_TRANSFORM_EXPRESSION_NOT_IN_PARTITIONED_BY" : {
"message" : [
"The expression <expression> must be inside 'partitionedBy'."
],
"sqlState" : "42S23"
},
"PARTITION_TRUNCATE_NOT_SUPPORTED" : {
"message" : [
"Partition truncate is not supported."
],
"sqlState" : "0A000"
},
"PATH_ALREADY_EXISTS" : {
"message" : [
"Path <outputPath> already exists. Set mode as \"overwrite\" to overwrite the existing path."
Expand Down Expand Up @@ -6982,6 +7072,12 @@
],
"sqlState" : "XX000"
},
"UNEXPECTED_V2_EXPRESSION" : {
"message" : [
"Unexpected V2 expression: <expr>."
],
"sqlState" : "42000"
},
"UNION_NOT_SUPPORTED_IN_RECURSIVE_CTE" : {
"message" : [
"The UNION operator is not yet supported within recursive common table expressions (WITH clauses that refer to themselves, directly or indirectly). Please use UNION ALL instead."
Expand Down Expand Up @@ -8203,6 +8299,18 @@
],
"sqlState" : "42000"
},
"V2_EXPRESSION_SQL_BUILDER_UDAF_NOT_SUPPORTED" : {
"message" : [
"<class> does not support user defined aggregate function: <funcName>."
],
"sqlState" : "0A000"
},
"V2_EXPRESSION_SQL_BUILDER_UDF_NOT_SUPPORTED" : {
"message" : [
"<class> does not support user defined function: <funcName>."
],
"sqlState" : "0A000"
},
"VARIABLE_ALREADY_EXISTS" : {
"message" : [
"Cannot create the variable <variableName> because it already exists.",
Expand Down Expand Up @@ -10696,91 +10804,6 @@
"CaseInsensitiveStringMap is read-only."
]
},
"_LEGACY_ERROR_TEMP_3133" : {
"message" : [
"<class> does not implement rowIdSchema"
]
},
"_LEGACY_ERROR_TEMP_3134" : {
"message" : [
"<class> does not implement metadataSchema"
]
},
"_LEGACY_ERROR_TEMP_3135" : {
"message" : [
"<class> does not support batch write"
]
},
"_LEGACY_ERROR_TEMP_3136" : {
"message" : [
"<class> does not support streaming write"
]
},
"_LEGACY_ERROR_TEMP_3137" : {
"message" : [
"<description>: Batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3138" : {
"message" : [
"<description>: Streaming write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3139" : {
"message" : [
"<description>: Delta batch write is not supported"
]
},
"_LEGACY_ERROR_TEMP_3140" : {
"message" : [
"<class> does not implement build"
]
},
"_LEGACY_ERROR_TEMP_3141" : {
"message" : [
"<class> does not support user defined function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3142" : {
"message" : [
"<class> does not support user defined aggregate function: <funcName>"
]
},
"_LEGACY_ERROR_TEMP_3143" : {
"message" : [
"Partition renaming is not supported"
]
},
"_LEGACY_ERROR_TEMP_3144" : {
"message" : [
"Partition truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3145" : {
"message" : [
"Partitions truncate is not supported"
]
},
"_LEGACY_ERROR_TEMP_3147" : {
"message" : [
"<description>: Batch scan is not supported"
]
},
"_LEGACY_ERROR_TEMP_3148" : {
"message" : [
"<description>: Micro-batch scan is not supported"
]
},
"_LEGACY_ERROR_TEMP_3149" : {
"message" : [
"<description>: Continuous scan is not supported"
]
},
"_LEGACY_ERROR_TEMP_3150" : {
"message" : [
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_3152" : {
"message" : [
"Datatype not supported <dataType>"
Expand Down Expand Up @@ -10986,11 +11009,6 @@
"<value> is not a boolean string."
]
},
"_LEGACY_ERROR_TEMP_3207" : {
"message" : [
"Unexpected V2 expression: <expr>"
]
},
"_LEGACY_ERROR_TEMP_3208" : {
"message" : [
"The number of fields (<numFields>) in the partition identifier is not equal to the partition schema length (<schemaLen>). The identifier might not refer to one partition."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,6 @@ default boolean purgePartitions(InternalRow[] idents)
*/
default boolean truncatePartitions(InternalRow[] idents)
throws NoSuchPartitionException, SparkUnsupportedOperationException {
throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3145");
throw new SparkUnsupportedOperationException("PARTITIONS_TRUNCATE_NOT_SUPPORTED");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ default boolean renamePartition(InternalRow from, InternalRow to)
throws SparkUnsupportedOperationException,
PartitionsAlreadyExistException,
NoSuchPartitionException {
throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3143");
throw new SparkUnsupportedOperationException("PARTITION_RENAME_NOT_SUPPORTED");
}

/**
Expand All @@ -172,6 +172,6 @@ default boolean renamePartition(InternalRow from, InternalRow to)
*/
default boolean truncatePartition(InternalRow ident)
throws NoSuchPartitionException, SparkUnsupportedOperationException {
throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3144");
throw new SparkUnsupportedOperationException("PARTITION_TRUNCATE_NOT_SUPPORTED");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ public interface PartitionReaderFactory extends Serializable {
* {@link InputPartition} class defined for the data source.
*/
default PartitionReader<ColumnarBatch> createColumnarReader(InputPartition partition) {
throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3150");
throw new SparkUnsupportedOperationException("DATASOURCE_COLUMNAR_READER_NOT_SUPPORTED");
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ default String description() {
*/
default Batch toBatch() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3147", Map.of("description", description()));
"DATASOURCE_BATCH_SCAN_NOT_SUPPORTED", Map.of("description", description()));
}

/**
Expand All @@ -95,7 +95,7 @@ default Batch toBatch() {
*/
default MicroBatchStream toMicroBatchStream(String checkpointLocation) {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3148", Map.of("description", description()));
"DATASOURCE_MICRO_BATCH_SCAN_NOT_SUPPORTED", Map.of("description", description()));
}

/**
Expand All @@ -112,7 +112,7 @@ default MicroBatchStream toMicroBatchStream(String checkpointLocation) {
*/
default ContinuousStream toContinuousStream(String checkpointLocation) {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3149", Map.of("description", description()));
"DATASOURCE_CONTINUOUS_SCAN_NOT_SUPPORTED", Map.of("description", description()));
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,6 @@ public interface ContinuousPartitionReaderFactory extends PartitionReaderFactory

@Override
default ContinuousPartitionReader<ColumnarBatch> createColumnarReader(InputPartition partition) {
throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3150");
throw new SparkUnsupportedOperationException("DATASOURCE_COLUMNAR_READER_NOT_SUPPORTED");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -319,20 +319,20 @@ protected String visitInverseDistributionFunction(
protected String visitUserDefinedScalarFunction(
String funcName, String canonicalName, String[] inputs) {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3141",
"V2_EXPRESSION_SQL_BUILDER_UDF_NOT_SUPPORTED",
Map.of("class", this.getClass().getSimpleName(), "funcName", funcName));
}

protected String visitUserDefinedAggregateFunction(
String funcName, String canonicalName, boolean isDistinct, String[] inputs) {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3142",
"V2_EXPRESSION_SQL_BUILDER_UDAF_NOT_SUPPORTED",
Map.of("class", this.getClass().getSimpleName(), "funcName", funcName));
}

protected String visitUnexpectedExpr(Expression expr) throws IllegalArgumentException {
throw new SparkIllegalArgumentException(
"_LEGACY_ERROR_TEMP_3207", Map.of("expr", String.valueOf(expr)));
"UNEXPECTED_V2_EXPRESSION", Map.of("expr", String.valueOf(expr)));
}

protected String visitPartitionPredicate(PartitionPredicate partitionPredicate) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,6 @@ public interface DeltaWrite extends Write {
@Override
default DeltaBatchWrite toBatch() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3139", Map.of("description", description()));
"DATASOURCE_DELTA_BATCH_WRITE_NOT_SUPPORTED", Map.of("description", description()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,6 @@ public interface DeltaWriteBuilder extends WriteBuilder {
@Override
default DeltaWrite build() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3140", Map.of("class", getClass().getName()));
"DATASOURCE_DELTA_WRITE_BUILD_NOT_IMPLEMENTED", Map.of("class", getClass().getName()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -55,14 +55,14 @@ public interface LogicalWriteInfo {
*/
default Optional<StructType> rowIdSchema() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3133", Map.of("class", getClass().getName()));
"DATASOURCE_ROW_ID_SCHEMA_NOT_IMPLEMENTED", Map.of("class", getClass().getName()));
}

/**
* the schema of the input metadata from Spark to data source.
*/
default Optional<StructType> metadataSchema() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3134", Map.of("class", getClass().getName()));
"DATASOURCE_METADATA_SCHEMA_NOT_IMPLEMENTED", Map.of("class", getClass().getName()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ default String description() {
*/
default BatchWrite toBatch() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3137", Map.of("description", description()));
"DATASOURCE_BATCH_WRITE_NOT_SUPPORTED", Map.of("description", description()));
}

/**
Expand All @@ -67,7 +67,7 @@ default BatchWrite toBatch() {
*/
default StreamingWrite toStreaming() {
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3138", Map.of("description", description()));
"DATASOURCE_STREAMING_WRITE_NOT_SUPPORTED", Map.of("description", description()));
}

/**
Expand Down
Loading