public class CreateHiveTableAsSelectCommand
extends org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
implements org.apache.spark.sql.execution.command.DataWritingCommand, scala.Product, scala.Serializable
Create the table and insert the query result into it.

param: tableDesc the table description, which may contain serde, storage handler, etc.
param: query the query whose result will be inserted into the new relation
param: outputColumnNames the names of the columns produced by the query
param: mode the SaveMode used when writing the query result
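This command is not normally constructed by hand; the analyzer produces it when planning a Hive-style CREATE TABLE ... AS SELECT (CTAS) statement. A minimal sketch of how the node arises, assuming a Hive-enabled Spark session and hypothetical tables `src` and `dst`:

```scala
import org.apache.spark.sql.SparkSession

// Assumes a Spark build with Hive support on the classpath.
val spark = SparkSession.builder()
  .appName("ctas-sketch")
  .enableHiveSupport()
  .getOrCreate()

spark.sql("CREATE TABLE src (key INT, value STRING) STORED AS PARQUET")

// A CTAS statement: the analyzer plans this as a CreateHiveTableAsSelectCommand
// whose query() child is the SELECT and whose tableDesc() describes dst.
val ctas = spark.sql("CREATE TABLE dst STORED AS PARQUET AS SELECT key, value FROM src")

// The command node should be visible in the analyzed plan's tree string.
println(ctas.queryExecution.analyzed.treeString)
```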
Constructor and Description |
---|
CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, scala.collection.Seq<String> outputColumnNames, SaveMode mode) |
Modifier and Type | Method and Description |
---|---|
static org.apache.spark.sql.catalyst.expressions.AttributeSeq | allAttributes() |
static boolean | analyzed() |
static org.apache.spark.sql.catalyst.trees.TreeNode<?> | apply(int number) |
String | argString() |
static String | asCode() |
static org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker | basicWriteJobStatsTracker(org.apache.hadoop.conf.Configuration hadoopConf) |
abstract static boolean | canEqual(Object that) |
static PlanType | canonicalized() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> | children() |
static boolean | childrenResolved() |
static <B> scala.collection.Seq<B> | collect(scala.PartialFunction<BaseType,B> pf) |
static <B> scala.Option<B> | collectFirst(scala.PartialFunction<BaseType,B> pf) |
static scala.collection.Seq<BaseType> | collectLeaves() |
static org.apache.spark.sql.internal.SQLConf | conf() |
static org.apache.spark.sql.catalyst.expressions.ExpressionSet | constraints() |
static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> | constructIsNotNullConstraints(scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> constraints, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output) |
static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> | containsChild() |
abstract static boolean | equals(Object that) |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> | expressions() |
static boolean | fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other) |
static scala.Option<BaseType> | find(scala.Function1<BaseType,Object> f) |
static <A> scala.collection.Seq<A> | flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f) |
static void | foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static void | foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f) |
static scala.collection.mutable.StringBuilder | generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix, boolean addSuffix) |
static String | generateTreeString$default$5() |
static boolean | generateTreeString$default$6() |
static int | hashCode() |
static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> | inferAdditionalConstraints(scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> constraints) |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | inputSet() |
static void | invalidateStatsCache() |
static boolean | isStreaming() |
static BaseType | makeCopy(Object[] newArgs) |
static <A> scala.collection.Seq<A> | map(scala.Function1<BaseType,A> f) |
static BaseType | mapChildren(scala.Function1<BaseType,BaseType> f) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | mapExpressions(scala.Function1<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> f) |
static scala.Option<Object> | maxRows() |
static scala.Option<Object> | maxRowsPerPartition() |
static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> | metrics() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | missingInput() |
SaveMode | mode() |
static String | nodeName() |
static String | numberedTreeString() |
static org.apache.spark.sql.catalyst.trees.Origin | origin() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | output() |
scala.collection.Seq<String> | outputColumnNames() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | outputColumns() |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> | outputOrdering() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | outputSet() |
static BaseType | p(int number) |
static String | prettyJson() |
static void | printSchema() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | producedAttributes() |
abstract static int | productArity() |
abstract static Object | productElement(int n) |
static scala.collection.Iterator<Object> | productIterator() |
static String | productPrefix() |
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | query() |
static org.apache.spark.sql.catalyst.expressions.AttributeSet | references() |
static void | refresh() |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolve(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver) |
static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> | resolve(StructType schema, scala.Function2<String,String,Object> resolver) |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolveChildren(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver) |
static boolean | resolved() |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> r) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveOperators(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveOperatorsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | resolveOperatorsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> | resolveQuoted(String name, scala.Function2<String,String,Object> resolver) |
scala.collection.Seq<Row> | run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child) |
static boolean | sameResult(PlanType other) |
static StructType | schema() |
static String | schemaString() |
static int | semanticHash() |
static String | simpleString() |
static org.apache.spark.sql.catalyst.plans.logical.Statistics | stats() |
static scala.collection.Seq<PlanType> | subqueries() |
org.apache.spark.sql.catalyst.catalog.CatalogTable | tableDesc() |
static String | toJSON() |
static String | toString() |
static BaseType | transform(scala.PartialFunction<BaseType,BaseType> rule) |
static org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper | transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | transformDown(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> | transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule) |
static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | transformUp(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule) |
static String | treeString() |
static String | treeString(boolean verbose, boolean addSuffix) |
static boolean | treeString$default$2() |
static String | verboseString() |
static String | verboseStringWithSuffix() |
static BaseType | withNewChildren(scala.collection.Seq<BaseType> newChildren) |
Methods inherited from class org.apache.spark.sql.catalyst.plans.logical.LogicalPlan:
analyzed, assertNotAnalysisRule, childrenResolved, constraints, constructIsNotNullConstraints, inferAdditionalConstraints, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, invalidateStatsCache, isStreaming, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, maxRows, maxRowsPerPartition, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed_$eq, org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$_analyzed, outputOrdering, refresh, resolve, resolve, resolveChildren, resolved, resolveExpressions, resolveOperators, resolveOperatorsDown, resolveOperatorsUp, resolveQuoted, setAnalyzed, statePrefix, stats, statsCache_$eq, statsCache, transformAllExpressions, transformDown, transformUp, validConstraints, verboseStringWithSuffix
Methods inherited from class org.apache.spark.sql.catalyst.plans.QueryPlan:
allAttributes, canEvaluate, canEvaluateWithinJoin, canonicalized, conf, doCanonicalize, expressions, innerChildren, inputSet, isCanonicalizedPlan, mapExpressions, missingInput, normalizeExprId, normalizePredicates, org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$1, org$apache$spark$sql$catalyst$plans$QueryPlan$$seqToExpressions$1, output, outputSet, printSchema, producedAttributes, references, replaceAlias, sameResult, schema, schemaString, semanticHash, simpleString, splitConjunctivePredicates, splitDisjunctivePredicates, subqueries, transformExpressions, transformExpressionsDown, transformExpressionsUp, verboseString
Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, asCode, children, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, hashCode, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, toString, transform, treeString, treeString, treeString$default$2, withNewChildren
Methods inherited from interface org.apache.spark.sql.execution.command.DataWritingCommand:
basicWriteJobStatsTracker, children, metrics, outputColumns
Methods inherited from interface scala.Product:
productArity, productElement, productIterator, productPrefix
Methods inherited from interface org.apache.spark.internal.Logging:
initializeLogging, log_
public CreateHiveTableAsSelectCommand(org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query, scala.collection.Seq<String> outputColumnNames, SaveMode mode)
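For illustration, a hedged sketch of building the command programmatically. The table description below is hypothetical and deliberately minimal; in practice the analyzer fills in serde, storage, and schema details when planning a CTAS statement:

```scala
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
import org.apache.spark.sql.types.StructType

// Hypothetical minimal table description; for CTAS the schema is
// derived from the query, so it can start out empty.
val tableDesc = CatalogTable(
  identifier = TableIdentifier("dst", Some("default")),
  tableType = CatalogTableType.MANAGED,
  storage = CatalogStorageFormat.empty,
  schema = new StructType())

// Assumes an existing Hive-enabled `spark` session and a `src` table.
val query = spark.table("src").queryExecution.analyzed

val cmd = CreateHiveTableAsSelectCommand(
  tableDesc,
  query,
  outputColumnNames = query.output.map(_.name),
  mode = SaveMode.ErrorIfExists)
```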
public abstract static boolean canEqual(Object that)
public abstract static boolean equals(Object that)
public abstract static Object productElement(int n)
public abstract static int productArity()
public static scala.collection.Iterator<Object> productIterator()
public static String productPrefix()
public static org.apache.spark.sql.catalyst.trees.Origin origin()
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.trees.TreeNode<?>> containsChild()
public static int hashCode()
public static boolean fastEquals(org.apache.spark.sql.catalyst.trees.TreeNode<?> other)
public static scala.Option<BaseType> find(scala.Function1<BaseType,Object> f)
public static void foreach(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static void foreachUp(scala.Function1<BaseType,scala.runtime.BoxedUnit> f)
public static <A> scala.collection.Seq<A> map(scala.Function1<BaseType,A> f)
public static <A> scala.collection.Seq<A> flatMap(scala.Function1<BaseType,scala.collection.TraversableOnce<A>> f)
public static <B> scala.collection.Seq<B> collect(scala.PartialFunction<BaseType,B> pf)
public static scala.collection.Seq<BaseType> collectLeaves()
public static <B> scala.Option<B> collectFirst(scala.PartialFunction<BaseType,B> pf)
public static BaseType withNewChildren(scala.collection.Seq<BaseType> newChildren)
public static BaseType transform(scala.PartialFunction<BaseType,BaseType> rule)
public static BaseType mapChildren(scala.Function1<BaseType,BaseType> f)
public static BaseType makeCopy(Object[] newArgs)
public static String nodeName()
public static String toString()
public static String treeString()
public static String treeString(boolean verbose, boolean addSuffix)
public static String numberedTreeString()
public static org.apache.spark.sql.catalyst.trees.TreeNode<?> apply(int number)
public static BaseType p(int number)
public static scala.collection.mutable.StringBuilder generateTreeString(int depth, scala.collection.Seq<Object> lastChildren, scala.collection.mutable.StringBuilder builder, boolean verbose, String prefix, boolean addSuffix)
public static String asCode()
public static String toJSON()
public static String prettyJson()
public static boolean treeString$default$2()
public static String generateTreeString$default$5()
public static boolean generateTreeString$default$6()
public static org.apache.spark.sql.internal.SQLConf conf()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet outputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet references()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet inputSet()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet producedAttributes()
public static org.apache.spark.sql.catalyst.expressions.AttributeSet missingInput()
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> transformExpressionsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.QueryPlan<PlanType> mapExpressions(scala.Function1<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> f)
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Expression> expressions()
public static StructType schema()
public static String schemaString()
public static void printSchema()
public static String simpleString()
public static String verboseString()
public static scala.collection.Seq<PlanType> subqueries()
public static final PlanType canonicalized()
public static final boolean sameResult(PlanType other)
public static final int semanticHash()
public static org.apache.spark.sql.catalyst.expressions.AttributeSeq allAttributes()
public static boolean analyzed()
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveOperators(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveOperatorsUp(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveOperatorsDown(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan resolveExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> r)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan transformDown(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.LogicalPlan transformUp(scala.PartialFunction<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan,org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> rule)
public static org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper transformAllExpressions(scala.PartialFunction<org.apache.spark.sql.catalyst.expressions.Expression,org.apache.spark.sql.catalyst.expressions.Expression> rule)
public static org.apache.spark.sql.catalyst.plans.logical.Statistics stats()
public static final void invalidateStatsCache()
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> inferAdditionalConstraints(scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> constraints)
public static scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> constructIsNotNullConstraints(scala.collection.immutable.Set<org.apache.spark.sql.catalyst.expressions.Expression> constraints, scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output)
public static org.apache.spark.sql.catalyst.expressions.ExpressionSet constraints()
public static boolean isStreaming()
public static String verboseStringWithSuffix()
public static scala.Option<Object> maxRows()
public static scala.Option<Object> maxRowsPerPartition()
public static boolean resolved()
public static boolean childrenResolved()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> resolve(StructType schema, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolveChildren(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolve(scala.collection.Seq<String> nameParts, scala.Function2<String,String,Object> resolver)
public static scala.Option<org.apache.spark.sql.catalyst.expressions.NamedExpression> resolveQuoted(String name, scala.Function2<String,String,Object> resolver)
public static void refresh()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.SortOrder> outputOrdering()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> output()
public static final scala.collection.Seq<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan> children()
public static scala.collection.Seq<org.apache.spark.sql.catalyst.expressions.Attribute> outputColumns()
public static scala.collection.immutable.Map<String,org.apache.spark.sql.execution.metric.SQLMetric> metrics()
public static org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker basicWriteJobStatsTracker(org.apache.hadoop.conf.Configuration hadoopConf)
public org.apache.spark.sql.catalyst.catalog.CatalogTable tableDesc()
public org.apache.spark.sql.catalyst.plans.logical.LogicalPlan query()
Specified by: query in interface org.apache.spark.sql.execution.command.DataWritingCommand
public scala.collection.Seq<String> outputColumnNames()
Specified by: outputColumnNames in interface org.apache.spark.sql.execution.command.DataWritingCommand
public SaveMode mode()
public scala.collection.Seq<Row> run(SparkSession sparkSession, org.apache.spark.sql.execution.SparkPlan child)
Specified by: run in interface org.apache.spark.sql.execution.command.DataWritingCommand
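Continuing the hypothetical sketch above, run can in principle be invoked directly with the physical plan of the query, though normally Spark's command execution machinery does this. It creates the table (honoring mode()) and writes the query result into it:

```scala
// Hypothetical continuation: `spark` and `cmd` as in the earlier sketch.
val child = spark.table("src").queryExecution.executedPlan

// Write commands return an empty Seq[Row]; the side effect is the table write.
val rows = cmd.run(spark, child)
```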
public String argString()
Overrides: argString in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.plans.logical.LogicalPlan>