public class QueryCompilationErrors
extends Object
| Constructor and Description |
|---|
| `QueryCompilationErrors()` |
| Modifier and Type | Method and Description |
|---|---|
static Throwable |
actionNotAllowedOnTableSincePartitionMetadataNotStoredError(String action,
String tableName) |
static Throwable |
actionNotAllowedOnTableWithFilesourcePartitionManagementDisabledError(String action,
String tableName) |
static Throwable |
addColumnWithV1TableCannotSpecifyNotNullError() |
static Throwable |
aggregateExpressionRequiredForPivotError(String sql) |
static Throwable |
aggregationFunctionAppliedOnNonNumericColumnError(String colName) |
static Throwable |
aggregationFunctionAppliedOnNonNumericColumnError(String pivotColumn,
int maxValues) |
static Throwable |
aliasesNumberNotMatchUDTFOutputError(int aliasesSize,
String aliasesNames) |
static Throwable |
aliasNumberNotMatchColumnNumberError(int columnSize,
int outputSize,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
alterAddColNotSupportDatasourceTableError(Object tableType,
org.apache.spark.sql.catalyst.TableIdentifier table) |
static Throwable |
alterAddColNotSupportViewError(org.apache.spark.sql.catalyst.TableIdentifier table) |
static Throwable |
alterColumnCannotFindColumnInV1TableError(String colName,
org.apache.spark.sql.connector.catalog.V1Table v1Table) |
static Throwable |
alterColumnWithV1TableCannotSpecifyNotNullError() |
static Throwable |
alterDatabaseLocationUnsupportedError(String version) |
static Throwable |
alterOnlySupportedWithV2TableError() |
static Throwable |
alterQualifiedColumnOnlySupportedWithV2TableError() |
static Throwable |
alterTableChangeColumnNotSupportedForColumnTypeError(StructField originColumn,
StructField newColumn) |
static Throwable |
alterTableRecoverPartitionsNotSupportedForV2TablesError() |
static Throwable |
alterTableSerDePropertiesNotSupportedForV2TablesError() |
static Throwable |
alterTableSetSerdeForSpecificPartitionNotSupportedError() |
static Throwable |
alterTableSetSerdeNotSupportedError() |
static Throwable |
alterV2TableSetLocationWithPartitionNotSupportedError() |
static Throwable |
ambiguousAttributesInSelfJoinError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference> ambiguousAttrs) |
static Throwable |
ambiguousFieldNameError(scala.collection.Seq<String> fieldName,
int numMatches,
org.apache.spark.sql.catalyst.trees.Origin context) |
static Throwable |
ambiguousReferenceToFieldsError(String fields) |
static Throwable |
ambiguousRelationAliasNameInNestedCTEError(String name) |
static Throwable |
analyzeTableNotSupportedForV2TablesError() |
static Throwable |
analyzeTableNotSupportedOnViewsError() |
static Throwable |
analyzingColumnStatisticsNotSupportedForColumnTypeError(String name,
DataType dataType) |
static Throwable |
arrayComponentTypeUnsupportedError(Class<?> clz) |
static Throwable |
attributeNameSyntaxError(String name) |
static Throwable |
attributeNotFoundError(String colName,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child) |
static Throwable |
batchWriteCapabilityError(Table table,
String v2WriteClassName,
String v1WriteClassName) |
static Throwable |
bucketByAndSortByUnsupportedByOperationError(String operation) |
static Throwable |
bucketByUnsupportedByOperationError(String operation) |
static Throwable |
bucketingColumnCannotBePartOfPartitionColumnsError(String bucketCol,
scala.collection.Seq<String> normalizedPartCols) |
static Throwable |
bucketSortingColumnCannotBePartOfPartitionColumnsError(String sortCol,
scala.collection.Seq<String> normalizedPartCols) |
static Throwable |
cannotAlterTableWithAlterViewError() |
static Throwable |
cannotAlterViewWithAlterTableError() |
static Throwable |
cannotApplyTableValuedFunctionError(String name,
String arguments,
String usage,
String details) |
static Throwable |
cannotConvertBucketWithSortColumnsToTransformError(org.apache.spark.sql.catalyst.catalog.BucketSpec spec) |
static Throwable |
cannotConvertDataTypeToParquetTypeError(StructField field) |
static Throwable |
cannotConvertTransformsToPartitionColumnsError(scala.collection.Seq<Transform> nonIdTransforms) |
static Throwable |
cannotCreateDatabaseWithSameNameAsPreservedDatabaseError(String database) |
static Throwable |
cannotCreateJDBCNamespaceUsingProviderError() |
static Throwable |
cannotCreateJDBCNamespaceWithPropertyError(String k) |
static Throwable |
cannotCreateJDBCTableUsingLocationError() |
static Throwable |
cannotCreateJDBCTableUsingProviderError() |
static Throwable |
cannotCreateTableWithBothProviderAndSerdeError(scala.Option<String> provider,
scala.Option<org.apache.spark.sql.catalyst.plans.logical.SerdeInfo> maybeSerdeInfo) |
static Throwable |
cannotCreateTempViewUsingHiveDataSourceError() |
static Throwable |
cannotDeleteTableWhereFiltersError(Table table,
Filter[] filters) |
static Throwable |
cannotDropDefaultDatabaseError() |
static Throwable |
cannotDropNativeFuncError(String functionName) |
static Throwable |
cannotDropViewWithDropTableError() |
static Throwable |
cannotFindCatalogToHandleIdentifierError(String quote) |
static Throwable |
cannotFindColumnError(String name,
String[] fieldNames) |
static Throwable |
cannotFindColumnInRelationOutputError(String colName,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan relation) |
static Throwable |
cannotFindPartitionColumnInPartitionSchemaError(StructField readField,
StructType partitionSchema) |
static Throwable |
cannotLoadClassNotOnClassPathError(String className) |
static Throwable |
cannotLoadClassWhenRegisteringFunctionError(String className,
org.apache.spark.sql.catalyst.FunctionIdentifier func) |
static Throwable |
cannotModifyValueOfSparkConfigError(String key) |
static Throwable |
cannotModifyValueOfStaticConfigError(String key) |
static Throwable |
cannotOperateManagedTableWithExistingLocationError(String methodName,
org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier,
org.apache.hadoop.fs.Path tableLocation) |
static Throwable |
cannotOperateOnHiveDataSourceFilesError(String operation) |
static Throwable |
cannotOverwritePathBeingReadFromError() |
static Throwable |
cannotOverwriteTableThatIsBeingReadFromError(String tableName) |
static Throwable |
cannotParseIntervalError(String delayThreshold,
Throwable e) |
static Throwable |
cannotPartitionByNestedColumnError(NamedReference reference) |
static Throwable |
cannotPassTypedColumnInUntypedSelectError(String typedCol) |
static Throwable |
cannotReadCorruptedTablePropertyError(String key,
String details) |
static Throwable |
cannotRefreshBuiltInFuncError(String functionName) |
static Throwable |
cannotRefreshTempFuncError(String functionName) |
static Throwable |
cannotRenameTableWithAlterViewError() |
static Throwable |
cannotRenameTempViewToExistingTableError(org.apache.spark.sql.catalyst.TableIdentifier oldName,
org.apache.spark.sql.catalyst.TableIdentifier newName) |
static Throwable |
cannotRenameTempViewWithDatabaseSpecifiedError(org.apache.spark.sql.catalyst.TableIdentifier oldName,
org.apache.spark.sql.catalyst.TableIdentifier newName) |
static Throwable |
cannotReplaceMissingTableError(Identifier tableIdentifier) |
static Throwable |
cannotReplaceMissingTableError(Identifier tableIdentifier,
scala.Option<Throwable> cause) |
static Throwable |
cannotResolveAttributeError(String name,
String outputStr) |
static Throwable |
cannotResolveColumnGivenInputColumnsError(String col,
String inputColumns) |
static Throwable |
cannotResolveColumnNameAmongAttributesError(String colName,
String fieldNames) |
static AnalysisException |
cannotResolveColumnNameAmongFieldsError(String colName,
String fieldsStr,
String extraMsg) |
static Throwable |
cannotResolveStarExpandGivenInputColumnsError(String targetString,
String columns) |
static Throwable |
cannotResolveUserSpecifiedColumnsError(String col,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
cannotRetrieveTableOrViewNotInSameDatabaseError(scala.collection.Seq<org.apache.spark.sql.catalyst.QualifiedTableName> qualifiedTableNames) |
static Throwable |
cannotSaveIntervalIntoExternalStorageError() |
static Throwable |
cannotSetJDBCNamespaceWithPropertyError(String k) |
static Throwable |
cannotSpecifyDatabaseForTempViewError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent) |
static Throwable |
cannotSpecifyWindowFrameError(String prettyName) |
static Throwable |
cannotTranslateExpressionToSourceFilterError(org.apache.spark.sql.catalyst.expressions.Expression f) |
static Throwable |
cannotUnsetJDBCNamespaceWithPropertyError(String k) |
static Throwable |
cannotUpCastAsAttributeError(org.apache.spark.sql.catalyst.expressions.Attribute fromAttr,
org.apache.spark.sql.catalyst.expressions.Attribute toAttr) |
static Throwable |
cannotUseAllColumnsForPartitionColumnsError() |
static Throwable |
cannotUseCatalogError(CatalogPlugin plugin,
String msg) |
static Throwable |
cannotUseDataTypeForPartitionColumnError(StructField field) |
static Throwable |
cannotUseIntervalTypeInTableSchemaError() |
static Throwable |
cannotUseMixtureOfAggFunctionAndGroupAggPandasUDFError() |
static Throwable |
cannotUsePreservedDatabaseAsCurrentDatabaseError(String database) |
static Throwable |
cannotWriteDataToRelationsWithMultiplePathsError() |
static Throwable |
cannotWriteIncompatibleDataToTableError(String tableName,
scala.collection.Seq<String> errors) |
static Throwable |
cannotWriteNotEnoughColumnsToTableError(String tableName,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> expected,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query) |
static Throwable |
cannotWriteTooManyColumnsToTableError(String tableName,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> expected,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query) |
static Throwable |
charOrVarcharTypeAsStringUnsupportedError() |
static Throwable |
checkpointLocationNotSpecifiedError() |
static Throwable |
classDoesNotImplementUserDefinedAggregateFunctionError(String className) |
static Throwable |
classWithoutPublicNonArgumentConstructorError(String className) |
static Throwable |
cmdOnlyWorksOnPartitionedTablesError(String cmd,
String tableIdentWithDB) |
static Throwable |
cmdOnlyWorksOnTableWithLocationError(String cmd,
String tableIdentWithDB) |
static Throwable |
columnDoesNotExistError(String colName) |
static Throwable |
columnNameContainsInvalidCharactersError(String name) |
static Throwable |
columnNotDefinedInTableError(String colType,
String colName,
String tableName,
scala.collection.Seq<String> tableCols) |
static Throwable |
columnNotFoundInExistingColumnsError(String columnType,
String columnName,
scala.collection.Seq<String> validColumnNames) |
static Throwable |
columnNotFoundInSchemaError(StructField col,
scala.Option<StructType> tableSchema) |
static Throwable |
columnStatisticsDeserializationNotSupportedError(String name,
DataType dataType) |
static Throwable |
columnStatisticsSerializationNotSupportedError(String colName,
DataType dataType) |
static Throwable |
columnTypeNotSupportStatisticsCollectionError(String name,
org.apache.spark.sql.catalyst.TableIdentifier tableIdent,
DataType dataType) |
static Throwable |
commandExecutionInRunnerUnsupportedError(String runner) |
static Throwable |
commandNotSupportNestedColumnError(String command,
String quoted) |
static Throwable |
commandUnsupportedInV2TableError(String name) |
static Throwable |
configRemovedInVersionError(String configName,
String version,
String comment) |
static Throwable |
conflictingAttributesInJoinConditionError(org.apache.spark.sql.catalyst.expressions.AttributeSet conflictingAttrs,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan outerPlan,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan subplan) |
static Throwable |
corruptedTableNameContextInCatalogError(int numParts,
int index) |
static Throwable |
corruptedViewQueryOutputColumnsInCatalogError(String numCols,
int index) |
static Throwable |
corruptedViewReferredTempFunctionsInCatalogError(Exception e) |
static Throwable |
corruptedViewReferredTempViewInCatalogError(Exception e) |
static Throwable |
corruptedViewSQLConfigsInCatalogError(Exception e) |
static Throwable |
createExternalTableWithoutLocationError() |
static Throwable |
createFuncWithBothIfNotExistsAndReplaceError() |
static Throwable |
createPersistedViewFromDatasetAPINotAllowedError() |
static Throwable |
createTableAsSelectWithNonEmptyDirectoryError(String tablePath) |
static Throwable |
createTableColumnTypesOptionColumnNotFoundInSchemaError(String col,
StructType schema) |
static Throwable |
createViewNumColumnsMismatchUserSpecifiedColumnLengthError(int analyzedPlanLength,
int userSpecifiedColumnsLength) |
static Throwable |
createViewWithBothIfNotExistsAndReplaceError() |
static Throwable |
databaseDoesNotExistError(String dbName) |
static Throwable |
databaseFromV1SessionCatalogNotSpecifiedError() |
static Throwable |
databaseNotEmptyError(String db,
String details) |
static Throwable |
dataPathNotExistError(String path) |
static Throwable |
dataSchemaNotSpecifiedError(String format) |
static Throwable |
dataSchemaNotSpecifiedError(String format,
String fileCatalog) |
static Throwable |
dataSourceOutputModeUnsupportedError(String className,
OutputMode outputMode) |
static Throwable |
dataTypeMismatchForDeserializerError(DataType dataType,
String desiredType) |
static Throwable |
dataTypeUnsupportedByClassError(DataType x,
String className) |
static Throwable |
dataTypeUnsupportedByDataSourceError(String format,
StructField field) |
static Throwable |
dataTypeUnsupportedByExtractValueError(DataType dataType,
org.apache.spark.sql.catalyst.expressions.Expression extraction,
org.apache.spark.sql.catalyst.expressions.Expression child) |
static Throwable |
ddlWithoutHiveSupportEnabledError(String detail) |
static Throwable |
decimalCannotGreaterThanPrecisionError(int scale,
int precision) |
static Throwable |
decimalOnlySupportPrecisionUptoError(String decimalType,
int precision) |
static Throwable |
defineTempFuncWithIfNotExistsError() |
static Throwable |
defineTempViewWithIfNotExistsError() |
static Throwable |
deleteOnlySupportedWithV2TablesError() |
static Throwable |
descPartitionNotAllowedOnTempView(String table) |
static Throwable |
descPartitionNotAllowedOnView(String table) |
static Throwable |
descPartitionNotAllowedOnViewError(String table) |
static Throwable |
describeDoesNotSupportPartitionForV2TablesError() |
static Throwable |
dropColumnOnlySupportedWithV2TableError() |
static Throwable |
dropNonExistentColumnsNotSupportedError(scala.collection.Seq<String> nonExistentColumnNames) |
static Throwable |
emptyMultipartIdentifierError() |
static Throwable |
emptyWindowExpressionError(org.apache.spark.sql.catalyst.plans.logical.Window expr) |
static Throwable |
expectTableNotViewError(org.apache.spark.sql.catalyst.analysis.ResolvedView v,
String cmd,
scala.Option<String> mismatchHint,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
expectTableOrPermanentViewNotTempViewError(String quoted,
String cmd,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
expectViewNotTableError(org.apache.spark.sql.catalyst.analysis.ResolvedTable v,
String cmd,
scala.Option<String> mismatchHint,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
expressionWithMultiWindowExpressionsError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.WindowSpecDefinition> distinctWindowSpec) |
static Throwable |
expressionWithoutWindowExpressionError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr) |
static Throwable |
externalCatalogNotSupportShowViewsError(org.apache.spark.sql.catalyst.analysis.ResolvedNamespace resolved) |
static Throwable |
failedFallbackParsingError(String msg,
Throwable e1,
Throwable e2) |
static Throwable |
failedToFindAvroDataSourceError(String provider) |
static Throwable |
failedToFindKafkaDataSourceError(String provider) |
static Throwable |
failedToRebuildExpressionError(Filter filter) |
static Throwable |
failToResolveDataSourceForTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table,
String key) |
static Throwable |
failToTruncateTableWhenRemovingDataError(String tableIdentWithDB,
org.apache.hadoop.fs.Path path,
Throwable e) |
static Throwable |
fieldNumberMismatchForDeserializerError(StructType schema,
int maxOrdinal) |
static Throwable |
findMultipleDataSourceError(String provider,
scala.collection.Seq<String> sourceNames) |
static Throwable |
foundDifferentWindowFunctionTypeError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> windowExpressions) |
static Throwable |
foundDuplicateColumnError(String colType,
scala.collection.Seq<String> duplicateCol) |
static Throwable |
functionAcceptsOnlyOneArgumentError(String name) |
static Throwable |
functionAlreadyExistsError(org.apache.spark.sql.catalyst.FunctionIdentifier func) |
static Throwable |
functionCannotProcessInputError(UnboundFunction unbound,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> arguments,
UnsupportedOperationException unsupported) |
static Throwable |
functionUndefinedError(org.apache.spark.sql.catalyst.FunctionIdentifier name) |
static Throwable |
functionUnsupportedInV2CatalogError() |
static Throwable |
functionWithUnsupportedSyntaxError(String prettyName,
String syntax) |
static Throwable |
generatorNotExpectedError(org.apache.spark.sql.catalyst.FunctionIdentifier name,
String classCanonicalName) |
static Throwable |
generatorOutsideSelectError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) |
static Throwable |
groupAggPandasUDFUnsupportedByStreamingAggError() |
static Throwable |
groupByPositionRangeError(int index,
int size) |
static Throwable |
groupByPositionRefersToAggregateFunctionError(int index,
org.apache.spark.sql.catalyst.expressions.Expression expr) |
static Throwable |
groupingColInvalidError(org.apache.spark.sql.catalyst.expressions.Expression groupingCol,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs) |
static Throwable |
groupingIDMismatchError(org.apache.spark.sql.catalyst.expressions.GroupingID groupingID,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs) |
static Throwable |
groupingMustWithGroupingSetsOrCubeOrRollupError() |
static Throwable |
groupingSizeTooLargeError(int sizeLimit) |
static Throwable |
hiveCreatePermanentFunctionsUnsupportedError() |
static Throwable |
hiveTableTypeUnsupportedError(String tableType) |
static Throwable |
hostOptionNotSetError() |
static Throwable |
identifierHavingMoreThanTwoNamePartsError(String quoted,
String identifier) |
static Throwable |
illegalParquetTypeError(String parquetType) |
static Throwable |
incompatibleRangeInputDataTypeError(org.apache.spark.sql.catalyst.expressions.Expression expression,
DataType dataType) |
static Throwable |
incompatibleViewSchemaChange(String viewName,
String colName,
int expectedNum,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> actualCols,
scala.Option<String> viewDDL) |
static Throwable |
inputSourceDiffersFromDataSourceProviderError(String source,
String tableName,
org.apache.spark.sql.catalyst.catalog.CatalogTable table) |
static Throwable |
insertIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
insertMismatchedColumnNumberError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> targetAttributes,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> sourceAttributes,
int staticPartitionsSize) |
static Throwable |
insertMismatchedPartitionNumberError(StructType targetPartitionSchema,
int providedPartitionsSize) |
static Throwable |
invalidBoundaryEndError(long end) |
static Throwable |
invalidBoundaryStartError(long start) |
static Throwable |
invalidBucketNumberError(int bucketingMaxBuckets,
int numBuckets) |
static Throwable |
invalidBucketsNumberError(String numBuckets,
String e) |
static Throwable |
invalidCoalesceHintParameterError(String hintName) |
static Throwable |
invalidDatabaseNameError(String quoted) |
static Throwable |
invalidDataSourceError(String className) |
static Throwable |
invalidDayTimeField(byte field) |
static Throwable |
invalidDayTimeIntervalType(String startFieldName,
String endFieldName) |
static Throwable |
invalidFieldName(scala.collection.Seq<String> fieldName,
scala.collection.Seq<String> path,
org.apache.spark.sql.catalyst.trees.Origin context) |
static Throwable |
invalidFieldTypeForCorruptRecordError() |
static Throwable |
invalidFileFormatForStoredAsError(org.apache.spark.sql.catalyst.plans.logical.SerdeInfo serdeInfo) |
static Throwable |
invalidFunctionArgumentNumberError(scala.collection.Seq<Object> validParametersCount,
String name,
scala.collection.Seq<Class<org.apache.spark.sql.catalyst.expressions.Expression>> params) |
static Throwable |
invalidFunctionArgumentsError(String name,
String expectedInfo,
int actualNumber) |
static Throwable |
invalidHintParameterError(String hintName,
scala.collection.Seq<Object> invalidParams) |
static Throwable |
invalidIncludeTimestampValueError() |
static Throwable |
invalidJoinTypeInJoinWithError(org.apache.spark.sql.catalyst.plans.JoinType joinType) |
static Throwable |
invalidLiteralForWindowDurationError() |
static Throwable |
invalidNameForTableOrDatabaseError(String name) |
static Throwable |
invalidOrderingForConstantValuePartitionColumnError(StructType targetPartitionSchema) |
static Throwable |
invalidPartitionColumnError(String partKey,
StructType targetPartitionSchema) |
static Throwable |
invalidPartitionColumnKeyInTableError(String key,
String tblName) |
static Throwable |
invalidPartitionColumnTypeError(StructField column) |
static Throwable |
invalidPartitionSpecError(String details) |
static Throwable |
invalidPartitionSpecError(String specKeys,
scala.collection.Seq<String> partitionColumnNames,
String tableName) |
static Throwable |
invalidPartitionTransformationError(org.apache.spark.sql.catalyst.expressions.Expression expr) |
static Throwable |
invalidPatternError(String pattern,
String message) |
static Throwable |
invalidRepartitionExpressionsError(scala.collection.Seq<Object> sortOrders) |
static Throwable |
invalidSchemaStringError(org.apache.spark.sql.catalyst.expressions.Expression exp) |
static Throwable |
invalidStarUsageError(String prettyName) |
static Throwable |
invalidTimestampProvidedForStrategyError(String strategy,
String timeString) |
static Throwable |
invalidViewNameError(String viewName) |
static Throwable |
invalidYearMonthField(byte field) |
static Throwable |
invalidYearMonthIntervalType(String startFieldName,
String endFieldName) |
static Throwable |
joinConditionMissingOrTrivialError(org.apache.spark.sql.catalyst.plans.logical.Join join,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan left,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan right) |
static Throwable |
joinStrategyHintParameterNotSupportedError(Object unsupported) |
static Throwable |
keyValueInMapNotStringError(org.apache.spark.sql.catalyst.expressions.CreateMap m) |
static Throwable |
legacyStoreAssignmentPolicyError() |
static Throwable |
literalTypeUnsupportedForSourceTypeError(String field,
org.apache.spark.sql.catalyst.expressions.Expression source) |
static Throwable |
loadDataInputPathNotExistError(String path) |
static Throwable |
loadDataNotSupportedForDatasourceTablesError(String tableIdentWithDB) |
static Throwable |
loadDataNotSupportedForV2TablesError() |
static Throwable |
loadDataPartitionSizeNotMatchNumPartitionColumnsError(String tableIdentWithDB,
int partitionSize,
int targetTableSize) |
static Throwable |
loadDataTargetTableNotPartitionedButPartitionSpecWasProvidedError(String tableIdentWithDB) |
static Throwable |
loadDataWithoutPartitionSpecProvidedError(String tableIdentWithDB) |
static Throwable |
logicalPlanForViewNotAnalyzedError() |
static Throwable |
lookupFunctionInNonFunctionCatalogError(Identifier ident,
CatalogPlugin catalog) |
static Throwable |
mismatchedInsertedDataColumnNumberError(String tableName,
org.apache.spark.sql.catalyst.plans.logical.InsertIntoStatement insert,
scala.collection.immutable.Set<String> staticPartCols) |
static Throwable |
mismatchedTableBucketingError(String tableName,
String specifiedBucketString,
String existingBucketString) |
static Throwable |
mismatchedTableColumnNumberError(String tableName,
org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query) |
static Throwable |
mismatchedTableFormatError(String tableName,
Class<?> existingProvider,
Class<?> specifiedProvider) |
static Throwable |
mismatchedTableLocationError(org.apache.spark.sql.catalyst.TableIdentifier identifier,
org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable,
org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc) |
static Throwable |
mismatchedTablePartitionColumnError(String tableName,
scala.collection.Seq<String> specifiedPartCols,
String existingPartCols) |
static Throwable |
missingFieldError(scala.collection.Seq<String> fieldName,
org.apache.spark.sql.catalyst.analysis.ResolvedTable table,
org.apache.spark.sql.catalyst.trees.Origin context) |
static Throwable |
missingStaticPartitionColumn(String staticName) |
static Throwable |
mixedRefsInAggFunc(String funcStr) |
static Throwable |
moreThanOneGeneratorError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> generators,
String clause) |
static Throwable |
multiplePartitionColumnValuesSpecifiedError(StructField field,
scala.collection.immutable.Map<String,String> potentialSpecs) |
static Throwable |
multiTimeWindowExpressionsNotSupportedError(org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
namespaceAlreadyExistsError(String[] namespace) |
static Throwable |
negativeScaleNotAllowedError(int scale) |
static Throwable |
nestedDatabaseUnsupportedByV1SessionCatalogError(String catalog) |
static Throwable |
nestedGeneratorError(org.apache.spark.sql.catalyst.expressions.Expression trimmedNestedGenerator) |
static Throwable |
noHandlerForUDAFError(String name) |
static Throwable |
nonDeterministicFilterInAggregateError() |
static Throwable |
nonLiteralPivotValError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal) |
static Throwable |
nonMapFunctionNotAllowedError() |
static Throwable |
nonPartitionColError(String partitionName) |
static Throwable |
nonPartitionPruningPredicatesNotExpectedError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> nonPartitionPruningPredicates) |
static Throwable |
noSuchFunctionError(org.apache.spark.sql.catalyst.FunctionIdentifier identifier) |
static Throwable |
noSuchNamespaceError(String[] namespace) |
static Throwable |
noSuchPartitionError(String db,
String table,
scala.collection.immutable.Map<String,String> partition) |
static Throwable |
noSuchStructFieldInGivenFieldsError(String fieldName,
StructField[] fields) |
static Throwable |
noSuchTableError(Identifier ident) |
static Throwable |
noSuchTableError(String db,
String table) |
static Throwable |
notAllowedToAddDBPrefixForTempViewError(String database) |
static Throwable |
notAllowedToCreatePermanentViewByReferencingTempFuncError(org.apache.spark.sql.catalyst.TableIdentifier name,
String funcName) |
static Throwable |
notAllowedToCreatePermanentViewByReferencingTempViewError(org.apache.spark.sql.catalyst.TableIdentifier name,
String nameParts) |
static Throwable |
notAllowedToCreatePermanentViewWithoutAssigningAliasForExpressionError(org.apache.spark.sql.catalyst.TableIdentifier name,
String attrName) |
static Throwable |
numberOfPartitionsNotAllowedWithUnspecifiedDistributionError() |
static Throwable |
operationNotSupportPartitioningError(String operation) |
static Throwable |
orcNotUsedWithHiveEnabledError() |
static Throwable |
orderByPositionRangeError(int index,
int size,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
outerScopeFailureForNewInstanceError(String className) |
static Throwable |
outputPathAlreadyExistsError(org.apache.hadoop.fs.Path outputPath) |
static Throwable |
pandasUDFAggregateNotSupportedInPivotError() |
static Throwable |
parquetTypeUnsupportedYetError(String parquetType) |
static Throwable |
parseModeUnsupportedError(String funcName,
org.apache.spark.sql.catalyst.util.ParseMode mode) |
static Throwable |
partitionByDoesNotAllowedWhenUsingInsertIntoError() |
static Throwable |
partitionColumnNotFoundInSchemaError(String col,
String schemaCatalog) |
static Throwable |
partitionColumnNotSpecifiedError(String format,
String partitionColumn) |
static Throwable |
partitionNotSpecifyLocationUriError(String specString) |
static Throwable |
pathOptionNotSetCorrectlyWhenReadingError() |
static Throwable |
pathOptionNotSetCorrectlyWhenWritingError() |
static Throwable |
permanentViewNotSupportedByStreamingReadingAPIError(String quoted) |
static Throwable |
pivotValDataTypeMismatchError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal,
org.apache.spark.sql.catalyst.expressions.Expression pivotCol) |
static Throwable |
portOptionNotSetError() |
static Throwable |
queryFromRawFilesIncludeCorruptRecordColumnError() |
static Throwable |
queryNameNotSpecifiedForMemorySinkError() |
static Throwable |
readNonStreamingTempViewError(String quoted) |
static Throwable |
recoverQueryFromCheckpointUnsupportedError(org.apache.hadoop.fs.Path checkpointPath) |
static Throwable |
recursiveViewDetectedError(org.apache.spark.sql.catalyst.TableIdentifier viewIdent,
scala.collection.Seq<org.apache.spark.sql.catalyst.TableIdentifier> newPath) |
static Throwable |
referenceColNotFoundForAlterTableChangesError(TableChange.After after,
String parentName) |
static Throwable |
renameColumnOnlySupportedWithV2TableError() |
static Throwable |
renameTableSourceAndDestinationMismatchError(String db,
String newDb) |
static Throwable |
renameTempViewToExistingViewError(String oldName,
String newName) |
static Throwable |
repairTableNotSupportedForV2TablesError() |
static Throwable |
replaceColumnsOnlySupportedWithV2TableError() |
static Throwable |
replaceTableAsSelectOnlySupportedWithV2TableError() |
static Throwable |
replaceTableOnlySupportedWithV2TableError() |
static Throwable |
requestedPartitionsMismatchTablePartitionsError(String tableName,
scala.collection.immutable.Map<String,scala.Option<String>> normalizedPartSpec,
StructType partColNames) |
static Throwable |
requiresSinglePartNamespaceError(scala.collection.Seq<String> ns) |
static Throwable |
resourceTypeNotSupportedError(String resourceType) |
static Throwable |
saveDataIntoViewNotAllowedError() |
static Throwable |
schemaIsNotStructTypeError(DataType dataType) |
static Throwable |
schemaNotFoldableError(org.apache.spark.sql.catalyst.expressions.Expression exp) |
static Throwable |
schemaNotSpecifiedForSchemaRelationProviderError(String className) |
static Throwable |
secondArgumentInFunctionIsNotBooleanLiteralError(String funcName) |
static Throwable |
secondArgumentNotDoubleLiteralError() |
static Throwable |
secondArgumentOfFunctionIsNotIntegerError(String function,
NumberFormatException e) |
static Throwable |
selectExprNotInGroupByError(org.apache.spark.sql.catalyst.expressions.Expression expr,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Alias> groupByAliases) |
static Throwable |
sessionWindowGapDurationDataTypeError(DataType dt) |
static Throwable |
setPathOptionAndCallWithPathParameterError(String method) |
static Throwable |
showColumnsNotSupportedForV2TablesError() |
static Throwable |
showColumnsWithConflictDatabasesError(scala.collection.Seq<String> db,
org.apache.spark.sql.catalyst.TableIdentifier v1TableName) |
static Throwable |
showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(org.apache.spark.sql.catalyst.TableIdentifier table) |
static Throwable |
showCreateTableAsSerdeNotSupportedForV2TablesError() |
static Throwable |
showCreateTableFailToExecuteUnsupportedConfError(org.apache.spark.sql.catalyst.TableIdentifier table,
scala.collection.mutable.StringBuilder builder) |
static Throwable |
showCreateTableFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table) |
static Throwable |
showCreateTableNotSupportedOnTempView(String table) |
static Throwable |
showCreateTableNotSupportTransactionalHiveTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table) |
static Throwable |
showCreateTableOrViewFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table,
scala.collection.Seq<String> features) |
static Throwable |
showPartitionNotAllowedOnTableNotPartitionedError(String tableIdentWithDB) |
static Throwable |
singleTableStarInCountNotAllowedError(String targetString) |
static Throwable |
sortByNotUsedWithBucketByError() |
static Throwable |
sourceNotSupportedWithContinuousTriggerError(String source) |
static Throwable |
specifyingDBInCreateTempFuncError(String databaseName) |
static Throwable |
specifyingDBInDropTempFuncError(String databaseName) |
static Throwable |
specifyPartitionNotAllowedWhenTableSchemaNotDefinedError() |
static Throwable |
sqlOnlySupportedWithV1TablesError(String sql) |
static Throwable |
starExpandDataTypeNotSupportedError(scala.collection.Seq<String> attributes) |
static Throwable |
starNotAllowedWhenGroupByOrdinalPositionUsedError() |
static Throwable |
streamingIntoViewNotSupportedError(String viewName) |
static Throwable |
streamingSourcesDoNotSupportCommonExecutionModeError(scala.collection.Seq<String> microBatchSources,
scala.collection.Seq<String> continuousSources) |
static Throwable |
streamJoinStreamWithoutEqualityPredicateUnsupportedError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan) |
static Throwable |
tableAlreadyExistsError(Identifier ident) |
static Throwable |
tableAlreadyExistsError(String table,
String guide) |
static Throwable |
tableAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent) |
static Throwable |
tableDoesNotExistInDatabaseError(String tableName,
String dbName) |
static Throwable |
tableDoesNotSupportAtomicPartitionManagementError(Table table) |
static Throwable |
tableDoesNotSupportDeletesError(Table table) |
static Throwable |
tableDoesNotSupportPartitionManagementError(Table table) |
static Throwable |
tableDoesNotSupportReadsError(Table table) |
static Throwable |
tableDoesNotSupportTruncatesError(Table table) |
static Throwable |
tableDoesNotSupportWritesError(Table table) |
static Throwable |
tableIdentifierExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier) |
static Throwable |
tableIdentifierNotConvertedToHadoopFsRelationError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier) |
static Throwable |
tableIsNotViewError(org.apache.spark.sql.catalyst.TableIdentifier name) |
static Throwable |
tableNotSpecifyDatabaseError(org.apache.spark.sql.catalyst.TableIdentifier identifier) |
static Throwable |
tableNotSpecifyLocationUriError(org.apache.spark.sql.catalyst.TableIdentifier identifier) |
static Throwable |
tableNotSupportStreamingWriteError(String tableName,
Table t) |
static Throwable |
tableOrViewAlreadyExistsError(String name) |
static Throwable |
tableOrViewNotFoundError(String table) |
static Throwable |
tableOrViewNotFoundInDatabaseError(String tableName,
String dbName) |
static Throwable |
tempViewNotCachedForAnalyzingColumnsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent) |
static Throwable |
tempViewNotSupportStreamingWriteError(String viewName) |
static Throwable |
textDataSourceWithMultiColumnsError(StructType schema) |
static Throwable |
truncateTableOnExternalTablesError(String tableIdentWithDB) |
static Throwable |
truncateTablePartitionNotSupportedForNotPartitionedTablesError(String tableIdentWithDB) |
static Throwable |
udfClassDoesNotImplementAnyUDFInterfaceError(String className) |
static Throwable |
udfClassNotAllowedToImplementMultiUDFInterfacesError(String className) |
static Throwable |
udfClassWithTooManyTypeArgumentsError(int n) |
static Throwable |
unexpectedEvalTypesForUDFsError(scala.collection.immutable.Set<Object> evalTypes) |
static Throwable |
unexpectedPartitionColumnPrefixError(String table,
String database,
String schemaColumns,
String specColumns) |
static Throwable |
unexpectedTypeOfRelationError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan relation,
String tableName) |
static Throwable |
unfoldableFieldUnsupportedError() |
static Throwable |
unknownHiveResourceTypeError(String resourceType) |
static Throwable |
unorderablePivotColError(org.apache.spark.sql.catalyst.expressions.Expression pivotCol) |
static Throwable |
unrecognizedParquetTypeError(String field) |
static Throwable |
unresolvedUsingColForJoinError(String colName,
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan,
String side) |
static Throwable |
unsetNonExistentPropertyError(String property,
org.apache.spark.sql.catalyst.TableIdentifier table) |
static Throwable |
unsupportedAbstractDataTypeForUpCastError(org.apache.spark.sql.types.AbstractDataType gotType) |
static Throwable |
unsupportedAppendInBatchModeError(Table table) |
static Throwable |
unsupportedBatchReadError(Table table) |
static Throwable |
unsupportedDataSourceTypeForDirectQueryOnFilesError(String className) |
static Throwable |
unsupportedDeleteByConditionWithSubqueryError(scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition) |
static Throwable |
unsupportedDynamicOverwriteInBatchModeError(Table table) |
static Throwable |
unsupportedIfNotExistsError(String tableName) |
static Throwable |
unsupportedJDBCNamespaceChangeInCatalogError(scala.collection.Seq<NamespaceChange> changes) |
static Throwable |
unsupportedMicroBatchOrContinuousScanError(Table table) |
static Throwable |
unsupportedOverwriteByFilterInBatchModeError(Table table) |
static Throwable |
unsupportedTableChangeInJDBCCatalogError(TableChange change) |
static Throwable |
unsupportedTableOperationError(Table table,
String cmd) |
static Throwable |
unsupportedTruncateInBatchModeError(Table table) |
static Throwable |
upCastFailureError(String fromStr,
org.apache.spark.sql.catalyst.expressions.Expression from,
DataType to,
scala.collection.Seq<String> walkedTypePath) |
static Throwable |
usePythonUDFInJoinConditionUnsupportedError(org.apache.spark.sql.catalyst.plans.JoinType joinType) |
static Throwable |
userDefinedPartitionNotFoundInJDBCRelationError(String columnName,
String schema) |
static Throwable |
userSpecifiedSchemaMismatchActualSchemaError(StructType schema,
StructType actualSchema) |
static Throwable |
userSpecifiedSchemaUnsupportedError(String operation) |
static Throwable |
usingUntypedScalaUDFError() |
static Throwable |
v2FunctionInvalidInputTypeLengthError(BoundFunction bound,
scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> args) |
static Throwable |
viewAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier name) |
static Throwable |
viewDepthExceedsMaxResolutionDepthError(org.apache.spark.sql.catalyst.TableIdentifier identifier,
int maxNestedViewDepth,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
viewOutputNumberMismatchQueryColumnNamesError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output,
scala.collection.Seq<String> queryColumnNames) |
static Throwable |
windowAggregateFunctionWithFilterNotSupportedError() |
static Throwable |
windowFrameNotMatchRequiredFrameError(org.apache.spark.sql.catalyst.expressions.SpecifiedWindowFrame f,
org.apache.spark.sql.catalyst.expressions.WindowFrame required) |
static Throwable |
windowFunctionInsideAggregateFunctionNotAllowedError() |
static Throwable |
windowFunctionNotAllowedError(String clauseName) |
static Throwable |
windowFunctionWithWindowFrameNotOrderedError(org.apache.spark.sql.catalyst.expressions.WindowFunction wf) |
static Throwable |
windowSpecificationNotDefinedError(String windowName) |
static Throwable |
writeEmptySchemasUnsupportedByDataSourceError() |
static Throwable |
writeIntoTempViewNotAllowedError(String quoted) |
static Throwable |
writeIntoV1TableNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
writeIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
writeTableWithMismatchedColumnsError(int columnSize,
int outputSize,
org.apache.spark.sql.catalyst.trees.TreeNode<?> t) |
static Throwable |
writeWithSaveModeUnsupportedBySourceError(String source,
String createMode) |
public static Throwable groupingIDMismatchError(org.apache.spark.sql.catalyst.expressions.GroupingID groupingID, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs)
public static Throwable groupingColInvalidError(org.apache.spark.sql.catalyst.expressions.Expression groupingCol, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> groupByExprs)
public static Throwable groupingSizeTooLargeError(int sizeLimit)
public static Throwable unorderablePivotColError(org.apache.spark.sql.catalyst.expressions.Expression pivotCol)
public static Throwable nonLiteralPivotValError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal)
public static Throwable pivotValDataTypeMismatchError(org.apache.spark.sql.catalyst.expressions.Expression pivotVal, org.apache.spark.sql.catalyst.expressions.Expression pivotCol)
public static Throwable unsupportedIfNotExistsError(String tableName)
public static Throwable nonPartitionColError(String partitionName)
public static Throwable missingStaticPartitionColumn(String staticName)
public static Throwable nestedGeneratorError(org.apache.spark.sql.catalyst.expressions.Expression trimmedNestedGenerator)
public static Throwable moreThanOneGeneratorError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> generators, String clause)
public static Throwable generatorOutsideSelectError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
public static Throwable legacyStoreAssignmentPolicyError()
public static Throwable unresolvedUsingColForJoinError(String colName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan, String side)
public static Throwable dataTypeMismatchForDeserializerError(DataType dataType, String desiredType)
public static Throwable fieldNumberMismatchForDeserializerError(StructType schema, int maxOrdinal)
public static Throwable upCastFailureError(String fromStr, org.apache.spark.sql.catalyst.expressions.Expression from, DataType to, scala.collection.Seq<String> walkedTypePath)
public static Throwable unsupportedAbstractDataTypeForUpCastError(org.apache.spark.sql.types.AbstractDataType gotType)
public static Throwable outerScopeFailureForNewInstanceError(String className)
public static Throwable referenceColNotFoundForAlterTableChangesError(TableChange.After after, String parentName)
public static Throwable windowSpecificationNotDefinedError(String windowName)
public static Throwable selectExprNotInGroupByError(org.apache.spark.sql.catalyst.expressions.Expression expr, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Alias> groupByAliases)
public static Throwable groupingMustWithGroupingSetsOrCubeOrRollupError()
public static Throwable pandasUDFAggregateNotSupportedInPivotError()
public static Throwable aggregateExpressionRequiredForPivotError(String sql)
public static Throwable writeIntoTempViewNotAllowedError(String quoted)
public static Throwable expectTableOrPermanentViewNotTempViewError(String quoted, String cmd, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable readNonStreamingTempViewError(String quoted)
public static Throwable viewDepthExceedsMaxResolutionDepthError(org.apache.spark.sql.catalyst.TableIdentifier identifier, int maxNestedViewDepth, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable insertIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable writeIntoViewNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable writeIntoV1TableNotAllowedError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable expectTableNotViewError(org.apache.spark.sql.catalyst.analysis.ResolvedView v, String cmd, scala.Option<String> mismatchHint, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable expectViewNotTableError(org.apache.spark.sql.catalyst.analysis.ResolvedTable v, String cmd, scala.Option<String> mismatchHint, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable permanentViewNotSupportedByStreamingReadingAPIError(String quoted)
public static Throwable starNotAllowedWhenGroupByOrdinalPositionUsedError()
public static Throwable invalidStarUsageError(String prettyName)
public static Throwable singleTableStarInCountNotAllowedError(String targetString)
public static Throwable orderByPositionRangeError(int index, int size, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable groupByPositionRefersToAggregateFunctionError(int index, org.apache.spark.sql.catalyst.expressions.Expression expr)
public static Throwable groupByPositionRangeError(int index, int size)
public static Throwable generatorNotExpectedError(org.apache.spark.sql.catalyst.FunctionIdentifier name, String classCanonicalName)
public static Throwable functionWithUnsupportedSyntaxError(String prettyName, String syntax)
public static Throwable nonDeterministicFilterInAggregateError()
public static Throwable aliasNumberNotMatchColumnNumberError(int columnSize, int outputSize, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable aliasesNumberNotMatchUDTFOutputError(int aliasesSize, String aliasesNames)
public static Throwable windowAggregateFunctionWithFilterNotSupportedError()
public static Throwable windowFunctionInsideAggregateFunctionNotAllowedError()
public static Throwable expressionWithoutWindowExpressionError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr)
public static Throwable expressionWithMultiWindowExpressionsError(org.apache.spark.sql.catalyst.expressions.NamedExpression expr, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.WindowSpecDefinition> distinctWindowSpec)
public static Throwable windowFunctionNotAllowedError(String clauseName)
public static Throwable cannotSpecifyWindowFrameError(String prettyName)
public static Throwable windowFrameNotMatchRequiredFrameError(org.apache.spark.sql.catalyst.expressions.SpecifiedWindowFrame f, org.apache.spark.sql.catalyst.expressions.WindowFrame required)
public static Throwable windowFunctionWithWindowFrameNotOrderedError(org.apache.spark.sql.catalyst.expressions.WindowFunction wf)
public static Throwable cannotResolveUserSpecifiedColumnsError(String col, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable writeTableWithMismatchedColumnsError(int columnSize, int outputSize, org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable multiTimeWindowExpressionsNotSupportedError(org.apache.spark.sql.catalyst.trees.TreeNode<?> t)
public static Throwable sessionWindowGapDurationDataTypeError(DataType dt)
public static Throwable viewOutputNumberMismatchQueryColumnNamesError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output, scala.collection.Seq<String> queryColumnNames)
public static Throwable attributeNotFoundError(String colName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child)
public static Throwable cannotUpCastAsAttributeError(org.apache.spark.sql.catalyst.expressions.Attribute fromAttr, org.apache.spark.sql.catalyst.expressions.Attribute toAttr)
public static Throwable functionUndefinedError(org.apache.spark.sql.catalyst.FunctionIdentifier name)
public static Throwable invalidFunctionArgumentsError(String name, String expectedInfo, int actualNumber)
public static Throwable invalidFunctionArgumentNumberError(scala.collection.Seq<Object> validParametersCount, String name, scala.collection.Seq<Class<org.apache.spark.sql.catalyst.expressions.Expression>> params)
public static Throwable functionAcceptsOnlyOneArgumentError(String name)
public static Throwable alterV2TableSetLocationWithPartitionNotSupportedError()
public static Throwable joinStrategyHintParameterNotSupportedError(Object unsupported)
public static Throwable invalidHintParameterError(String hintName, scala.collection.Seq<Object> invalidParams)
public static Throwable invalidCoalesceHintParameterError(String hintName)
public static Throwable attributeNameSyntaxError(String name)
public static Throwable starExpandDataTypeNotSupportedError(scala.collection.Seq<String> attributes)
public static Throwable cannotResolveStarExpandGivenInputColumnsError(String targetString, String columns)
public static Throwable addColumnWithV1TableCannotSpecifyNotNullError()
public static Throwable replaceColumnsOnlySupportedWithV2TableError()
public static Throwable alterQualifiedColumnOnlySupportedWithV2TableError()
public static Throwable alterColumnWithV1TableCannotSpecifyNotNullError()
public static Throwable alterOnlySupportedWithV2TableError()
public static Throwable alterColumnCannotFindColumnInV1TableError(String colName, org.apache.spark.sql.connector.catalog.V1Table v1Table)
public static Throwable renameColumnOnlySupportedWithV2TableError()
public static Throwable dropColumnOnlySupportedWithV2TableError()
public static Throwable invalidDatabaseNameError(String quoted)
public static Throwable replaceTableOnlySupportedWithV2TableError()
public static Throwable replaceTableAsSelectOnlySupportedWithV2TableError()
public static Throwable cannotDropViewWithDropTableError()
public static Throwable showColumnsWithConflictDatabasesError(scala.collection.Seq<String> db, org.apache.spark.sql.catalyst.TableIdentifier v1TableName)
public static Throwable externalCatalogNotSupportShowViewsError(org.apache.spark.sql.catalyst.analysis.ResolvedNamespace resolved)
public static Throwable sqlOnlySupportedWithV1TablesError(String sql)
public static Throwable cannotCreateTableWithBothProviderAndSerdeError(scala.Option<String> provider, scala.Option<org.apache.spark.sql.catalyst.plans.logical.SerdeInfo> maybeSerdeInfo)
public static Throwable invalidFileFormatForStoredAsError(org.apache.spark.sql.catalyst.plans.logical.SerdeInfo serdeInfo)
public static Throwable commandNotSupportNestedColumnError(String command, String quoted)
public static Throwable columnDoesNotExistError(String colName)
public static Throwable renameTempViewToExistingViewError(String oldName, String newName)
public static Throwable databaseNotEmptyError(String db, String details)
public static Throwable invalidNameForTableOrDatabaseError(String name)
public static Throwable cannotCreateDatabaseWithSameNameAsPreservedDatabaseError(String database)
public static Throwable cannotDropDefaultDatabaseError()
public static Throwable cannotUsePreservedDatabaseAsCurrentDatabaseError(String database)
public static Throwable createExternalTableWithoutLocationError()
public static Throwable cannotOperateManagedTableWithExistingLocationError(String methodName, org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier, org.apache.hadoop.fs.Path tableLocation)
public static Throwable dropNonExistentColumnsNotSupportedError(scala.collection.Seq<String> nonExistentColumnNames)
public static Throwable cannotRetrieveTableOrViewNotInSameDatabaseError(scala.collection.Seq<org.apache.spark.sql.catalyst.QualifiedTableName> qualifiedTableNames)
public static Throwable renameTableSourceAndDestinationMismatchError(String db, String newDb)
public static Throwable cannotRenameTempViewWithDatabaseSpecifiedError(org.apache.spark.sql.catalyst.TableIdentifier oldName, org.apache.spark.sql.catalyst.TableIdentifier newName)
public static Throwable cannotRenameTempViewToExistingTableError(org.apache.spark.sql.catalyst.TableIdentifier oldName, org.apache.spark.sql.catalyst.TableIdentifier newName)
public static Throwable invalidPartitionSpecError(String details)
public static Throwable functionAlreadyExistsError(org.apache.spark.sql.catalyst.FunctionIdentifier func)
public static Throwable cannotLoadClassWhenRegisteringFunctionError(String className, org.apache.spark.sql.catalyst.FunctionIdentifier func)
public static Throwable resourceTypeNotSupportedError(String resourceType)
public static Throwable tableNotSpecifyDatabaseError(org.apache.spark.sql.catalyst.TableIdentifier identifier)
public static Throwable tableNotSpecifyLocationUriError(org.apache.spark.sql.catalyst.TableIdentifier identifier)
public static Throwable partitionNotSpecifyLocationUriError(String specString)
public static Throwable invalidBucketNumberError(int bucketingMaxBuckets, int numBuckets)
public static Throwable corruptedTableNameContextInCatalogError(int numParts, int index)
public static Throwable corruptedViewSQLConfigsInCatalogError(Exception e)
public static Throwable corruptedViewQueryOutputColumnsInCatalogError(String numCols, int index)
public static Throwable corruptedViewReferredTempViewInCatalogError(Exception e)
public static Throwable corruptedViewReferredTempFunctionsInCatalogError(Exception e)
public static Throwable columnStatisticsDeserializationNotSupportedError(String name, DataType dataType)
public static Throwable columnStatisticsSerializationNotSupportedError(String colName, DataType dataType)
public static Throwable cannotReadCorruptedTablePropertyError(String key, String details)
public static Throwable invalidSchemaStringError(org.apache.spark.sql.catalyst.expressions.Expression exp)
public static Throwable schemaNotFoldableError(org.apache.spark.sql.catalyst.expressions.Expression exp)
public static Throwable schemaIsNotStructTypeError(DataType dataType)
public static Throwable keyValueInMapNotStringError(org.apache.spark.sql.catalyst.expressions.CreateMap m)
public static Throwable nonMapFunctionNotAllowedError()
public static Throwable invalidFieldTypeForCorruptRecordError()
public static Throwable dataTypeUnsupportedByClassError(DataType x, String className)
public static Throwable parseModeUnsupportedError(String funcName, org.apache.spark.sql.catalyst.util.ParseMode mode)
public static Throwable unfoldableFieldUnsupportedError()
public static Throwable literalTypeUnsupportedForSourceTypeError(String field, org.apache.spark.sql.catalyst.expressions.Expression source)
public static Throwable arrayComponentTypeUnsupportedError(Class<?> clz)
public static Throwable secondArgumentNotDoubleLiteralError()
public static Throwable dataTypeUnsupportedByExtractValueError(DataType dataType, org.apache.spark.sql.catalyst.expressions.Expression extraction, org.apache.spark.sql.catalyst.expressions.Expression child)
public static Throwable noHandlerForUDAFError(String name)
public static Throwable batchWriteCapabilityError(Table table, String v2WriteClassName, String v1WriteClassName)
public static Throwable unsupportedDeleteByConditionWithSubqueryError(scala.Option<org.apache.spark.sql.catalyst.expressions.Expression> condition)
public static Throwable cannotTranslateExpressionToSourceFilterError(org.apache.spark.sql.catalyst.expressions.Expression f)
public static Throwable cannotDeleteTableWhereFiltersError(Table table, Filter[] filters)
public static Throwable deleteOnlySupportedWithV2TablesError()
public static Throwable describeDoesNotSupportPartitionForV2TablesError()
public static Throwable cannotReplaceMissingTableError(Identifier tableIdentifier)
public static Throwable cannotReplaceMissingTableError(Identifier tableIdentifier, scala.Option<Throwable> cause)
public static Throwable unsupportedTableOperationError(Table table, String cmd)
public static Throwable unsupportedBatchReadError(Table table)
public static Throwable unsupportedMicroBatchOrContinuousScanError(Table table)
public static Throwable unsupportedAppendInBatchModeError(Table table)
public static Throwable unsupportedDynamicOverwriteInBatchModeError(Table table)
public static Throwable unsupportedTruncateInBatchModeError(Table table)
public static Throwable unsupportedOverwriteByFilterInBatchModeError(Table table)
public static Throwable streamingSourcesDoNotSupportCommonExecutionModeError(scala.collection.Seq<String> microBatchSources, scala.collection.Seq<String> continuousSources)
public static Throwable noSuchTableError(Identifier ident)
public static Throwable noSuchNamespaceError(String[] namespace)
public static Throwable tableAlreadyExistsError(Identifier ident)
public static Throwable requiresSinglePartNamespaceError(scala.collection.Seq<String> ns)
public static Throwable namespaceAlreadyExistsError(String[] namespace)
public static Throwable cannotCreateJDBCTableUsingProviderError()
public static Throwable cannotCreateJDBCTableUsingLocationError()
public static Throwable cannotCreateJDBCNamespaceUsingProviderError()
public static Throwable cannotCreateJDBCNamespaceWithPropertyError(String k)
public static Throwable cannotSetJDBCNamespaceWithPropertyError(String k)
public static Throwable cannotUnsetJDBCNamespaceWithPropertyError(String k)
public static Throwable unsupportedJDBCNamespaceChangeInCatalogError(scala.collection.Seq<NamespaceChange> changes)
public static Throwable tableDoesNotSupportReadsError(Table table)
public static Throwable tableDoesNotSupportWritesError(Table table)
public static Throwable tableDoesNotSupportDeletesError(Table table)
public static Throwable tableDoesNotSupportTruncatesError(Table table)
public static Throwable tableDoesNotSupportPartitionManagementError(Table table)
public static Throwable tableDoesNotSupportAtomicPartitionManagementError(Table table)
public static Throwable cannotRenameTableWithAlterViewError()
public static Throwable analyzeTableNotSupportedForV2TablesError()
public static Throwable alterTableRecoverPartitionsNotSupportedForV2TablesError()
public static Throwable alterTableSerDePropertiesNotSupportedForV2TablesError()
public static Throwable loadDataNotSupportedForV2TablesError()
public static Throwable showCreateTableAsSerdeNotSupportedForV2TablesError()
public static Throwable showColumnsNotSupportedForV2TablesError()
public static Throwable repairTableNotSupportedForV2TablesError()
public static Throwable databaseFromV1SessionCatalogNotSpecifiedError()
public static Throwable nestedDatabaseUnsupportedByV1SessionCatalogError(String catalog)
public static Throwable invalidRepartitionExpressionsError(scala.collection.Seq<Object> sortOrders)
public static Throwable partitionColumnNotSpecifiedError(String format, String partitionColumn)
public static Throwable dataSchemaNotSpecifiedError(String format)
public static Throwable dataPathNotExistError(String path)
public static Throwable dataSourceOutputModeUnsupportedError(String className, OutputMode outputMode)
public static Throwable schemaNotSpecifiedForSchemaRelationProviderError(String className)
public static Throwable userSpecifiedSchemaMismatchActualSchemaError(StructType schema, StructType actualSchema)
public static Throwable dataSchemaNotSpecifiedError(String format, String fileCatalog)
public static Throwable invalidDataSourceError(String className)
public static Throwable cannotSaveIntervalIntoExternalStorageError()
public static Throwable cannotResolveAttributeError(String name, String outputStr)
public static Throwable orcNotUsedWithHiveEnabledError()
public static Throwable failedToFindAvroDataSourceError(String provider)
public static Throwable failedToFindKafkaDataSourceError(String provider)
public static Throwable findMultipleDataSourceError(String provider, scala.collection.Seq<String> sourceNames)
public static Throwable writeEmptySchemasUnsupportedByDataSourceError()
public static Throwable insertMismatchedColumnNumberError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> targetAttributes, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> sourceAttributes, int staticPartitionsSize)
public static Throwable insertMismatchedPartitionNumberError(StructType targetPartitionSchema, int providedPartitionsSize)
public static Throwable invalidPartitionColumnError(String partKey, StructType targetPartitionSchema)
public static Throwable multiplePartitionColumnValuesSpecifiedError(StructField field, scala.collection.immutable.Map<String,String> potentialSpecs)
public static Throwable invalidOrderingForConstantValuePartitionColumnError(StructType targetPartitionSchema)
public static Throwable cannotWriteDataToRelationsWithMultiplePathsError()
public static Throwable failedToRebuildExpressionError(Filter filter)
public static Throwable dataTypeUnsupportedByDataSourceError(String format, StructField field)
public static Throwable failToResolveDataSourceForTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table, String key)
public static Throwable outputPathAlreadyExistsError(org.apache.hadoop.fs.Path outputPath)
public static Throwable cannotUseDataTypeForPartitionColumnError(StructField field)
public static Throwable cannotUseAllColumnsForPartitionColumnsError()
public static Throwable partitionColumnNotFoundInSchemaError(String col, String schemaCatalog)
public static Throwable columnNotFoundInSchemaError(StructField col, scala.Option<StructType> tableSchema)
public static Throwable unsupportedDataSourceTypeForDirectQueryOnFilesError(String className)
public static Throwable saveDataIntoViewNotAllowedError()
public static Throwable mismatchedTableFormatError(String tableName, Class<?> existingProvider, Class<?> specifiedProvider)
public static Throwable mismatchedTableLocationError(org.apache.spark.sql.catalyst.TableIdentifier identifier, org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable, org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc)
public static Throwable mismatchedTableColumnNumberError(String tableName, org.apache.spark.sql.catalyst.catalog.CatalogTable existingTable, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
public static Throwable cannotResolveColumnGivenInputColumnsError(String col, String inputColumns)
public static Throwable mismatchedTablePartitionColumnError(String tableName, scala.collection.Seq<String> specifiedPartCols, String existingPartCols)
public static Throwable mismatchedTableBucketingError(String tableName, String specifiedBucketString, String existingBucketString)
public static Throwable specifyPartitionNotAllowedWhenTableSchemaNotDefinedError()
public static Throwable bucketingColumnCannotBePartOfPartitionColumnsError(String bucketCol, scala.collection.Seq<String> normalizedPartCols)
public static Throwable bucketSortingColumnCannotBePartOfPartitionColumnsError(String sortCol, scala.collection.Seq<String> normalizedPartCols)
public static Throwable mismatchedInsertedDataColumnNumberError(String tableName, org.apache.spark.sql.catalyst.plans.logical.InsertIntoStatement insert, scala.collection.immutable.Set<String> staticPartCols)
public static Throwable requestedPartitionsMismatchTablePartitionsError(String tableName, scala.collection.immutable.Map<String,scala.Option<String>> normalizedPartSpec, StructType partColNames)
public static Throwable ddlWithoutHiveSupportEnabledError(String detail)
public static Throwable createTableColumnTypesOptionColumnNotFoundInSchemaError(String col, StructType schema)
public static Throwable parquetTypeUnsupportedYetError(String parquetType)
public static Throwable illegalParquetTypeError(String parquetType)
public static Throwable unrecognizedParquetTypeError(String field)
public static Throwable cannotConvertDataTypeToParquetTypeError(StructField field)
public static Throwable incompatibleViewSchemaChange(String viewName, String colName, int expectedNum, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> actualCols, scala.Option<String> viewDDL)
public static Throwable numberOfPartitionsNotAllowedWithUnspecifiedDistributionError()
public static Throwable cannotApplyTableValuedFunctionError(String name, String arguments, String usage, String details)
public static Throwable incompatibleRangeInputDataTypeError(org.apache.spark.sql.catalyst.expressions.Expression expression, DataType dataType)
public static Throwable groupAggPandasUDFUnsupportedByStreamingAggError()
public static Throwable streamJoinStreamWithoutEqualityPredicateUnsupportedError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan)
public static Throwable cannotUseMixtureOfAggFunctionAndGroupAggPandasUDFError()
public static Throwable ambiguousAttributesInSelfJoinError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.AttributeReference> ambiguousAttrs)
public static Throwable unexpectedEvalTypesForUDFsError(scala.collection.immutable.Set<Object> evalTypes)
public static Throwable ambiguousFieldNameError(scala.collection.Seq<String> fieldName, int numMatches, org.apache.spark.sql.catalyst.trees.Origin context)
public static Throwable cannotUseIntervalTypeInTableSchemaError()
public static Throwable cannotConvertBucketWithSortColumnsToTransformError(org.apache.spark.sql.catalyst.catalog.BucketSpec spec)
public static Throwable cannotConvertTransformsToPartitionColumnsError(scala.collection.Seq<Transform> nonIdTransforms)
public static Throwable cannotPartitionByNestedColumnError(NamedReference reference)
public static Throwable cannotUseCatalogError(CatalogPlugin plugin, String msg)
public static Throwable identifierHavingMoreThanTwoNamePartsError(String quoted, String identifier)
public static Throwable emptyMultipartIdentifierError()
public static Throwable functionUnsupportedInV2CatalogError()
public static Throwable cannotOperateOnHiveDataSourceFilesError(String operation)
public static Throwable setPathOptionAndCallWithPathParameterError(String method)
public static Throwable userSpecifiedSchemaUnsupportedError(String operation)
public static Throwable tempViewNotSupportStreamingWriteError(String viewName)
public static Throwable streamingIntoViewNotSupportedError(String viewName)
public static Throwable inputSourceDiffersFromDataSourceProviderError(String source, String tableName, org.apache.spark.sql.catalyst.catalog.CatalogTable table)
public static Throwable tableNotSupportStreamingWriteError(String tableName, Table t)
public static Throwable queryNameNotSpecifiedForMemorySinkError()
public static Throwable sourceNotSupportedWithContinuousTriggerError(String source)
public static Throwable columnNotFoundInExistingColumnsError(String columnType, String columnName, scala.collection.Seq<String> validColumnNames)
public static Throwable operationNotSupportPartitioningError(String operation)
public static Throwable mixedRefsInAggFunc(String funcStr)
public static Throwable lookupFunctionInNonFunctionCatalogError(Identifier ident, CatalogPlugin catalog)
public static Throwable functionCannotProcessInputError(UnboundFunction unbound, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> arguments, UnsupportedOperationException unsupported)
public static Throwable v2FunctionInvalidInputTypeLengthError(BoundFunction bound, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> args)
public static Throwable ambiguousRelationAliasNameInNestedCTEError(String name)
public static Throwable commandUnsupportedInV2TableError(String name)
public static Throwable cannotResolveColumnNameAmongAttributesError(String colName, String fieldNames)
public static Throwable cannotWriteTooManyColumnsToTableError(String tableName, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> expected, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
public static Throwable cannotWriteNotEnoughColumnsToTableError(String tableName, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> expected, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query)
public static Throwable cannotWriteIncompatibleDataToTableError(String tableName, scala.collection.Seq<String> errors)
public static Throwable secondArgumentOfFunctionIsNotIntegerError(String function, NumberFormatException e)
public static Throwable nonPartitionPruningPredicatesNotExpectedError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> nonPartitionPruningPredicates)
public static Throwable columnNotDefinedInTableError(String colType, String colName, String tableName, scala.collection.Seq<String> tableCols)
public static Throwable invalidLiteralForWindowDurationError()
public static Throwable noSuchStructFieldInGivenFieldsError(String fieldName, StructField[] fields)
public static Throwable ambiguousReferenceToFieldsError(String fields)
public static Throwable secondArgumentInFunctionIsNotBooleanLiteralError(String funcName)
public static Throwable joinConditionMissingOrTrivialError(org.apache.spark.sql.catalyst.plans.logical.Join join, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan left, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan right)
public static Throwable usePythonUDFInJoinConditionUnsupportedError(org.apache.spark.sql.catalyst.plans.JoinType joinType)
public static Throwable conflictingAttributesInJoinConditionError(org.apache.spark.sql.catalyst.expressions.AttributeSet conflictingAttrs, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan outerPlan, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan subplan)
public static Throwable emptyWindowExpressionError(org.apache.spark.sql.catalyst.plans.logical.Window expr)
public static Throwable foundDifferentWindowFunctionTypeError(scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.NamedExpression> windowExpressions)
public static Throwable charOrVarcharTypeAsStringUnsupportedError()
public static Throwable invalidPatternError(String pattern, String message)
public static Throwable tableIdentifierExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier)
public static Throwable tableIdentifierNotConvertedToHadoopFsRelationError(org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier)
public static Throwable alterDatabaseLocationUnsupportedError(String version)
public static Throwable hiveTableTypeUnsupportedError(String tableType)
public static Throwable hiveCreatePermanentFunctionsUnsupportedError()
public static Throwable unknownHiveResourceTypeError(String resourceType)
public static Throwable invalidDayTimeField(byte field)
public static Throwable invalidDayTimeIntervalType(String startFieldName, String endFieldName)
public static Throwable invalidYearMonthField(byte field)
public static Throwable invalidYearMonthIntervalType(String startFieldName, String endFieldName)
public static Throwable configRemovedInVersionError(String configName, String version, String comment)
public static Throwable failedFallbackParsingError(String msg, Throwable e1, Throwable e2)
public static Throwable decimalCannotGreaterThanPrecisionError(int scale, int precision)
public static Throwable decimalOnlySupportPrecisionUptoError(String decimalType, int precision)
public static Throwable negativeScaleNotAllowedError(int scale)
public static Throwable invalidPartitionColumnKeyInTableError(String key, String tblName)
public static Throwable invalidPartitionSpecError(String specKeys, scala.collection.Seq<String> partitionColumnNames, String tableName)
public static Throwable foundDuplicateColumnError(String colType, scala.collection.Seq<String> duplicateCol)
public static Throwable noSuchTableError(String db, String table)
public static Throwable tempViewNotCachedForAnalyzingColumnsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
public static Throwable columnTypeNotSupportStatisticsCollectionError(String name, org.apache.spark.sql.catalyst.TableIdentifier tableIdent, DataType dataType)
public static Throwable analyzeTableNotSupportedOnViewsError()
public static Throwable unexpectedPartitionColumnPrefixError(String table, String database, String schemaColumns, String specColumns)
public static Throwable noSuchPartitionError(String db, String table, scala.collection.immutable.Map<String,String> partition)
public static Throwable analyzingColumnStatisticsNotSupportedForColumnTypeError(String name, DataType dataType)
public static Throwable tableAlreadyExistsError(String table, String guide)
public static Throwable createTableAsSelectWithNonEmptyDirectoryError(String tablePath)
public static Throwable tableOrViewNotFoundError(String table)
public static Throwable unsetNonExistentPropertyError(String property, org.apache.spark.sql.catalyst.TableIdentifier table)
public static Throwable alterTableChangeColumnNotSupportedForColumnTypeError(StructField originColumn, StructField newColumn)
public static Throwable cannotFindColumnError(String name, String[] fieldNames)
public static Throwable alterTableSetSerdeForSpecificPartitionNotSupportedError()
public static Throwable alterTableSetSerdeNotSupportedError()
public static Throwable cmdOnlyWorksOnPartitionedTablesError(String cmd, String tableIdentWithDB)
public static Throwable cmdOnlyWorksOnTableWithLocationError(String cmd, String tableIdentWithDB)
public static Throwable actionNotAllowedOnTableWithFilesourcePartitionManagementDisabledError(String action, String tableName)
public static Throwable actionNotAllowedOnTableSincePartitionMetadataNotStoredError(String action, String tableName)
public static Throwable cannotAlterViewWithAlterTableError()
public static Throwable cannotAlterTableWithAlterViewError()
public static Throwable cannotOverwritePathBeingReadFromError()
public static Throwable createFuncWithBothIfNotExistsAndReplaceError()
public static Throwable defineTempFuncWithIfNotExistsError()
public static Throwable specifyingDBInCreateTempFuncError(String databaseName)
public static Throwable specifyingDBInDropTempFuncError(String databaseName)
public static Throwable cannotDropNativeFuncError(String functionName)
public static Throwable cannotRefreshBuiltInFuncError(String functionName)
public static Throwable cannotRefreshTempFuncError(String functionName)
public static Throwable noSuchFunctionError(org.apache.spark.sql.catalyst.FunctionIdentifier identifier)
public static Throwable alterAddColNotSupportViewError(org.apache.spark.sql.catalyst.TableIdentifier table)
public static Throwable alterAddColNotSupportDatasourceTableError(Object tableType, org.apache.spark.sql.catalyst.TableIdentifier table)
public static Throwable loadDataNotSupportedForDatasourceTablesError(String tableIdentWithDB)
public static Throwable loadDataWithoutPartitionSpecProvidedError(String tableIdentWithDB)
public static Throwable loadDataPartitionSizeNotMatchNumPartitionColumnsError(String tableIdentWithDB, int partitionSize, int targetTableSize)
public static Throwable loadDataTargetTableNotPartitionedButPartitionSpecWasProvidedError(String tableIdentWithDB)
public static Throwable loadDataInputPathNotExistError(String path)
public static Throwable truncateTableOnExternalTablesError(String tableIdentWithDB)
public static Throwable truncateTablePartitionNotSupportedForNotPartitionedTablesError(String tableIdentWithDB)
public static Throwable failToTruncateTableWhenRemovingDataError(String tableIdentWithDB, org.apache.hadoop.fs.Path path, Throwable e)
public static Throwable descPartitionNotAllowedOnTempView(String table)
public static Throwable descPartitionNotAllowedOnView(String table)
public static Throwable showPartitionNotAllowedOnTableNotPartitionedError(String tableIdentWithDB)
public static Throwable showCreateTableNotSupportedOnTempView(String table)
public static Throwable showCreateTableFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table)
public static Throwable showCreateTableNotSupportTransactionalHiveTableError(org.apache.spark.sql.catalyst.catalog.CatalogTable table)
public static Throwable showCreateTableFailToExecuteUnsupportedConfError(org.apache.spark.sql.catalyst.TableIdentifier table, scala.collection.mutable.StringBuilder builder)
public static Throwable descPartitionNotAllowedOnViewError(String table)
public static Throwable showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(org.apache.spark.sql.catalyst.TableIdentifier table)
public static Throwable showCreateTableOrViewFailToExecuteUnsupportedFeatureError(org.apache.spark.sql.catalyst.catalog.CatalogTable table, scala.collection.Seq<String> features)
public static Throwable createViewWithBothIfNotExistsAndReplaceError()
public static Throwable defineTempViewWithIfNotExistsError()
public static Throwable notAllowedToAddDBPrefixForTempViewError(String database)
public static Throwable logicalPlanForViewNotAnalyzedError()
public static Throwable createViewNumColumnsMismatchUserSpecifiedColumnLengthError(int analyzedPlanLength, int userSpecifiedColumnsLength)
public static Throwable tableIsNotViewError(org.apache.spark.sql.catalyst.TableIdentifier name)
public static Throwable viewAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier name)
public static Throwable createPersistedViewFromDatasetAPINotAllowedError()
public static Throwable recursiveViewDetectedError(org.apache.spark.sql.catalyst.TableIdentifier viewIdent, scala.collection.Seq<org.apache.spark.sql.catalyst.TableIdentifier> newPath)
public static Throwable notAllowedToCreatePermanentViewWithoutAssigningAliasForExpressionError(org.apache.spark.sql.catalyst.TableIdentifier name, String attrName)
public static Throwable notAllowedToCreatePermanentViewByReferencingTempViewError(org.apache.spark.sql.catalyst.TableIdentifier name, String nameParts)
public static Throwable notAllowedToCreatePermanentViewByReferencingTempFuncError(org.apache.spark.sql.catalyst.TableIdentifier name, String funcName)
public static Throwable queryFromRawFilesIncludeCorruptRecordColumnError()
public static Throwable userDefinedPartitionNotFoundInJDBCRelationError(String columnName, String schema)
public static Throwable invalidPartitionColumnTypeError(StructField column)
public static Throwable tableOrViewAlreadyExistsError(String name)
public static Throwable columnNameContainsInvalidCharactersError(String name)
public static Throwable textDataSourceWithMultiColumnsError(StructType schema)
public static Throwable cannotFindPartitionColumnInPartitionSchemaError(StructField readField, StructType partitionSchema)
public static Throwable cannotSpecifyDatabaseForTempViewError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
public static Throwable cannotCreateTempViewUsingHiveDataSourceError()
public static Throwable invalidTimestampProvidedForStrategyError(String strategy, String timeString)
public static Throwable hostOptionNotSetError()
public static Throwable portOptionNotSetError()
public static Throwable invalidIncludeTimestampValueError()
public static Throwable checkpointLocationNotSpecifiedError()
public static Throwable recoverQueryFromCheckpointUnsupportedError(org.apache.hadoop.fs.Path checkpointPath)
public static Throwable cannotFindColumnInRelationOutputError(String colName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan relation)
public static Throwable invalidBoundaryStartError(long start)
public static Throwable invalidBoundaryEndError(long end)
public static Throwable databaseDoesNotExistError(String dbName)
public static Throwable tableDoesNotExistInDatabaseError(String tableName, String dbName)
public static Throwable tableOrViewNotFoundInDatabaseError(String tableName, String dbName)
public static Throwable unexpectedTypeOfRelationError(org.apache.spark.sql.catalyst.plans.logical.LogicalPlan relation, String tableName)
public static Throwable unsupportedTableChangeInJDBCCatalogError(TableChange change)
public static Throwable pathOptionNotSetCorrectlyWhenReadingError()
public static Throwable pathOptionNotSetCorrectlyWhenWritingError()
public static Throwable writeWithSaveModeUnsupportedBySourceError(String source, String createMode)
public static Throwable partitionByDoesNotAllowedWhenUsingInsertIntoError()
public static Throwable cannotFindCatalogToHandleIdentifierError(String quote)
public static Throwable sortByNotUsedWithBucketByError()
public static Throwable bucketByUnsupportedByOperationError(String operation)
public static Throwable bucketByAndSortByUnsupportedByOperationError(String operation)
public static Throwable tableAlreadyExistsError(org.apache.spark.sql.catalyst.TableIdentifier tableIdent)
public static Throwable cannotOverwriteTableThatIsBeingReadFromError(String tableName)
public static Throwable invalidPartitionTransformationError(org.apache.spark.sql.catalyst.expressions.Expression expr)
public static AnalysisException cannotResolveColumnNameAmongFieldsError(String colName, String fieldsStr, String extraMsg)
public static Throwable cannotParseIntervalError(String delayThreshold, Throwable e)
public static Throwable invalidJoinTypeInJoinWithError(org.apache.spark.sql.catalyst.plans.JoinType joinType)
public static Throwable cannotPassTypedColumnInUntypedSelectError(String typedCol)
public static Throwable invalidViewNameError(String viewName)
public static Throwable invalidBucketsNumberError(String numBuckets, String e)
public static Throwable usingUntypedScalaUDFError()
public static Throwable aggregationFunctionAppliedOnNonNumericColumnError(String colName)
public static Throwable aggregationFunctionAppliedOnNonNumericColumnError(String pivotColumn, int maxValues)
public static Throwable cannotModifyValueOfStaticConfigError(String key)
public static Throwable cannotModifyValueOfSparkConfigError(String key)
public static Throwable commandExecutionInRunnerUnsupportedError(String runner)
public static Throwable udfClassDoesNotImplementAnyUDFInterfaceError(String className)
public static Throwable udfClassNotAllowedToImplementMultiUDFInterfacesError(String className)
public static Throwable udfClassWithTooManyTypeArgumentsError(int n)
public static Throwable classWithoutPublicNonArgumentConstructorError(String className)
public static Throwable cannotLoadClassNotOnClassPathError(String className)
public static Throwable classDoesNotImplementUserDefinedAggregateFunctionError(String className)
public static Throwable missingFieldError(scala.collection.Seq<String> fieldName, org.apache.spark.sql.catalyst.analysis.ResolvedTable table, org.apache.spark.sql.catalyst.trees.Origin context)
public static Throwable invalidFieldName(scala.collection.Seq<String> fieldName, scala.collection.Seq<String> path, org.apache.spark.sql.catalyst.trees.Origin context)