Package org.apache.spark.sql.errors
Class DataTypeErrors
Object
org.apache.spark.sql.errors.DataTypeErrors
Object for grouping error messages from (most) exceptions thrown during query execution.
This does not include exceptions thrown during the eager execution of commands, which are
grouped into
QueryCompilationErrors
.
-
Constructor Summary
-
Method Summary
Modifier and Type — Method — Description
static Throwable
ambiguousColumnOrFieldError
(scala.collection.Seq<String> name, int numMatches, org.apache.spark.sql.catalyst.trees.Origin context) static Throwable
static ArithmeticException
cannotChangeDecimalPrecisionError
(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static Throwable
cannotLoadUserDefinedTypeError
(String name, String userClass) static Throwable
cannotMergeDecimalTypesWithIncompatibleScaleError
(int leftScale, int rightScale) static Throwable
cannotMergeIncompatibleDataTypesError
(DataType left, DataType right) static ArithmeticException
castingCauseOverflowError
(String t, DataType from, DataType to) static Throwable
static Throwable
dataTypeUnsupportedError
(String dataType, String failure) static Throwable
decimalCannotGreaterThanPrecisionError
(int scale, int precision) static org.apache.spark.SparkArithmeticException
decimalPrecisionExceedsMaxPrecisionError
(int precision, int maxPrecision) static org.apache.spark.SparkRuntimeException
static org.apache.spark.SparkUnsupportedOperationException
static QueryContext[]
getQueryContext
(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext) static String
getSummary
(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext) static Throwable
invalidDayTimeField
(byte field, scala.collection.Seq<String> supportedIds) static Throwable
invalidDayTimeIntervalType
(String startFieldName, String endFieldName) static Throwable
invalidFieldName
(scala.collection.Seq<String> fieldName, scala.collection.Seq<String> path, org.apache.spark.sql.catalyst.trees.Origin context) static org.apache.spark.SparkNumberFormatException
invalidInputInCastToNumberError
(DataType to, org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static Throwable
invalidYearMonthField
(byte field, scala.collection.Seq<String> supportedIds) static Throwable
negativeScaleNotAllowedError
(int scale) static Throwable
notUserDefinedTypeError
(String name, String userClass) static org.apache.spark.SparkUnsupportedOperationException
static org.apache.spark.SparkArithmeticException
outOfDecimalTypeRangeError
(org.apache.spark.unsafe.types.UTF8String str) static Throwable
schemaFailToParseError
(String schema, Throwable e) static String
static String
static String
static String
static String
static String
toSQLType
(org.apache.spark.sql.types.AbstractDataType t) static String
toSQLValue
(double value) static String
toSQLValue
(float value) static String
toSQLValue
(int value) static String
toSQLValue
(long value) static String
toSQLValue
(short value) static String
toSQLValue
(String value) static String
toSQLValue
(org.apache.spark.unsafe.types.UTF8String value) static ArithmeticException
unscaledValueTooLargeForPrecisionError
(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) static org.apache.spark.SparkRuntimeException
unsupportedArrayTypeError
(Class<?> clazz) static org.apache.spark.SparkRuntimeException
unsupportedJavaTypeError
(Class<?> clazz) static org.apache.spark.SparkUnsupportedOperationException
static SparkException
unsupportedRoundingMode
(scala.Enumeration.Value roundMode) static Throwable
userSpecifiedSchemaUnsupportedError
(String operation) static Throwable
valueIsNullError
(int index)
-
Constructor Details
-
DataTypeErrors
public DataTypeErrors()
-
-
Method Details
-
unsupportedOperationExceptionError
public static org.apache.spark.SparkUnsupportedOperationException unsupportedOperationExceptionError() -
decimalPrecisionExceedsMaxPrecisionError
public static org.apache.spark.SparkArithmeticException decimalPrecisionExceedsMaxPrecisionError(int precision, int maxPrecision) -
unsupportedRoundingMode
-
outOfDecimalTypeRangeError
public static org.apache.spark.SparkArithmeticException outOfDecimalTypeRangeError(org.apache.spark.unsafe.types.UTF8String str) -
unsupportedJavaTypeError
-
nullLiteralsCannotBeCastedError
public static org.apache.spark.SparkUnsupportedOperationException nullLiteralsCannotBeCastedError(String name) -
notUserDefinedTypeError
-
cannotLoadUserDefinedTypeError
-
unsupportedArrayTypeError
-
schemaFailToParseError
-
invalidDayTimeIntervalType
-
invalidDayTimeField
-
invalidYearMonthField
-
decimalCannotGreaterThanPrecisionError
-
negativeScaleNotAllowedError
-
attributeNameSyntaxError
-
cannotMergeIncompatibleDataTypesError
-
cannotMergeDecimalTypesWithIncompatibleScaleError
public static Throwable cannotMergeDecimalTypesWithIncompatibleScaleError(int leftScale, int rightScale) -
dataTypeUnsupportedError
-
invalidFieldName
-
unscaledValueTooLargeForPrecisionError
public static ArithmeticException unscaledValueTooLargeForPrecisionError(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
cannotChangeDecimalPrecisionError
public static ArithmeticException cannotChangeDecimalPrecisionError(Decimal value, int decimalPrecision, int decimalScale, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
invalidInputInCastToNumberError
public static org.apache.spark.SparkNumberFormatException invalidInputInCastToNumberError(DataType to, org.apache.spark.unsafe.types.UTF8String s, org.apache.spark.sql.catalyst.trees.SQLQueryContext context) -
ambiguousColumnOrFieldError
-
castingCauseOverflowError
-
failedParsingStructTypeError
-
fieldIndexOnRowWithoutSchemaError
public static org.apache.spark.SparkUnsupportedOperationException fieldIndexOnRowWithoutSchemaError() -
valueIsNullError
-
charOrVarcharTypeAsStringUnsupportedError
-
userSpecifiedSchemaUnsupportedError
-
toSQLId
-
toSQLId
-
toSQLStmt
-
toSQLConf
-
toSQLType
-
toSQLType
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
toSQLValue
-
getSummary
-
getQueryContext
public static QueryContext[] getQueryContext(org.apache.spark.sql.catalyst.trees.SQLQueryContext sqlContext)
-