public class CreateTableAsSelect extends SparkPlan implements LeafNode, Command, scala.Product, scala.Serializable
Constructor Summary

| Constructor and Description |
|---|
| CreateTableAsSelect(String database, String tableName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean allowExisting, scala.Option&lt;org.apache.hadoop.hive.ql.plan.CreateTableDesc&gt; desc) |
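CreateTableAsSelect is the physical operator behind Hive-style CREATE TABLE ... AS SELECT statements; user code does not normally construct it directly. A minimal sketch of the statement that the Hive planner is expected to lower into this node, assuming a Spark 1.x HiveContext and a pre-existing Hive table named src (both illustrative, not part of this API):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

object CtasSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("ctas-sketch"))
    val hive = new HiveContext(sc)

    // The Hive planner is expected to turn this statement into a
    // CreateTableAsSelect(database, tableName, query, allowExisting, desc) node;
    // IF NOT EXISTS should correspond to allowExisting = true.
    hive.sql(
      "CREATE TABLE IF NOT EXISTS key_counts AS " +
      "SELECT key, count(*) AS cnt FROM src GROUP BY key")

    sc.stop()
  }
}
```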
Method Summary

| Modifier and Type | Method and Description |
|---|---|
| boolean | allowExisting() |
| String | argString() |
| String | database() |
| scala.Option&lt;org.apache.hadoop.hive.ql.plan.CreateTableDesc&gt; | desc() |
| RDD&lt;org.apache.spark.sql.catalyst.expressions.Row&gt; | execute() Runs this query returning the result as an RDD. |
| scala.collection.Seq&lt;scala.runtime.Nothing$&gt; | output() |
| org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | query() |
| String | tableName() |
Methods inherited from class org.apache.spark.sql.execution.SparkPlan:
codegenEnabled, executeCollect, makeCopy, outputPartitioning, requiredChildDistribution

Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
expressions, inputSet, missingInput, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionDown$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$transformExpressionUp$1, outputSet, printSchema, references, schema, schemaString, simpleString, statePrefix, transformAllExpressions, transformExpressions, transformExpressionsDown, transformExpressionsUp

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, asCode, children, collect, fastEquals, flatMap, foreach, generateTreeString, getNodeNumbered, map, mapChildren, nodeName, numberedTreeString, otherCopyArgs, stringArgs, toString, transform, transformChildrenDown, transformChildrenUp, transformDown, transformUp, treeString, withNewChildren

Methods inherited from interface org.apache.spark.sql.execution.Command:
executeCollect

Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix

Methods inherited from interface org.apache.spark.Logging:
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
Constructor Detail

public CreateTableAsSelect(String database, String tableName, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, boolean allowExisting, scala.Option&lt;org.apache.hadoop.hive.ql.plan.CreateTableDesc&gt; desc)
Method Detail

public String database()
public String tableName()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
public boolean allowExisting()
public scala.Option<org.apache.hadoop.hive.ql.plan.CreateTableDesc> desc()
public scala.collection.Seq<scala.runtime.Nothing$> output()
public RDD&lt;org.apache.spark.sql.catalyst.expressions.Row&gt; execute()
Runs this query returning the result as an RDD.
Specified by: execute in class SparkPlan
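Because the node is a Command, the table creation and population happen as a side effect of execute(); the returned RDD of rows is incidental. A hedged sketch of inspecting the planned operator through the Spark 1.x developer API (queryExecution and executedPlan are assumed to be exposed on the result of sql(), as on SchemaRDD/DataFrame; hive and src are the illustrative context and table from the earlier sketch):

```scala
// Reuses the illustrative HiveContext `hive` and source table `src` from the sketch above.
val ctas = hive.sql("CREATE TABLE copy_of_src AS SELECT * FROM src")

// Developer API: look at the physical plan chosen for the statement. For a CTAS
// statement it is expected to contain a CreateTableAsSelect node; printing the plan
// goes through the TreeNode string helpers (argString, treeString).
println(ctas.queryExecution.executedPlan)
```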
public String argString()
Overrides: argString in class org.apache.spark.sql.catalyst.trees.TreeNode&lt;SparkPlan&gt;