public class Table extends Object implements Serializable
| Modifier and Type | Class and Description |
|---|---|
class |
Table.ValidationFailureSemanticException
Marker SemanticException, so that processing that allows for table validation failures
and appropriately handles them can recover from these types of SemanticExceptions
|
| Constructor and Description |
|---|
Table()
Used only for serialization.
|
Table(String databaseName, String tableName) |
Table(Table table) |
public Table()
public Table(Table table)
protected void initialize(Table table)
public boolean isDummyTable()
public Table getTTable()
public void setTTable(Table tTable)
public static Table getEmptyTable(String databaseName, String tableName)
public void checkValidity() throws HiveException
public StorageDescriptor getSd()
public void setInputFormatClass(Class<? extends org.apache.hadoop.mapred.InputFormat> inputFormatClass)
public void setOutputFormatClass(Class<? extends org.apache.hadoop.mapred.OutputFormat> outputFormatClass)
public final Properties getMetadata()
public final org.apache.hadoop.fs.Path getPath()
public final String getTableName()
public final org.apache.hadoop.fs.Path getDataLocation()
public final Deserializer getDeserializer()
public final Class<? extends Deserializer> getDeserializerClass() throws Exception
public final Deserializer getDeserializer(boolean skipConfError)
public final Deserializer getDeserializerFromMetaStore(boolean skipConfError)
public HiveStorageHandler getStorageHandler()
public final Class<? extends org.apache.hadoop.mapred.InputFormat> getInputFormatClass()
public final Class<? extends org.apache.hadoop.mapred.OutputFormat> getOutputFormatClass()
public final void validatePartColumnNames(Map<String,String> spec, boolean shouldBeFull) throws SemanticException
public boolean isImmutable()
public void setTableType(TableType tableType)
public TableType getTableType()
public ArrayList<StructField> getFields()
public StructField getField(String fld)
public List<FieldSchema> getPartCols()
public boolean isPartitionKey(String colName)
public String getBucketingDimensionId()
public void setDataLocation(org.apache.hadoop.fs.Path path)
public void unsetDataLocation()
public void setBucketCols(List<String> bucketCols) throws HiveException
public void setSortCols(List<Order> sortOrder) throws HiveException
public void setSkewedValueLocationMap(List<String> valList, String dirName) throws HiveException
public void setSkewedColValues(List<List<String>> skewedValues) throws HiveException
public void setSkewedColNames(List<String> skewedColNames) throws HiveException
public SkewedInfo getSkewedInfo()
public void setSkewedInfo(SkewedInfo skewedInfo) throws HiveException
public boolean isStoredAsSubDirectories()
public void setStoredAsSubDirectories(boolean storedAsSubDirectories) throws HiveException
public List<FieldSchema> getCols()
public List<FieldSchema> getAllCols()
public void setPartCols(List<FieldSchema> partCols)
public String getDbName()
public int getNumBuckets()
public void setInputFormatClass(String name) throws HiveException
public void setOutputFormatClass(String name) throws HiveException
public boolean isPartitioned()
public void setFields(List<FieldSchema> fields)
public void setNumBuckets(int nb)
public String getOwner()
public Map<String,String> getParameters()
public int getRetention()
public void setOwner(String owner)
public void setRetention(int retention)
public void setSerializationLib(String lib)
public String getSerializationLib()
public void setTableName(String tableName)
public void setDbName(String databaseName)
public List<FieldSchema> getPartitionKeys()
public String getViewOriginalText()
public void setViewOriginalText(String viewOriginalText)
viewOriginalText - the original view text to set
public String getViewExpandedText()
public void clearSerDeInfo()
public void setViewExpandedText(String viewExpandedText)
viewExpandedText - the expanded view text to set
public boolean isView()
public boolean isIndexTable()
public LinkedHashMap<String,String> createSpec(Partition tp)
tp - Use the information from this partition.
public Table copy() throws HiveException
public void setCreateTime(int createTime)
public int getLastAccessTime()
public void setLastAccessTime(int lastAccessTime)
public boolean isNonNative()
public void setProtectMode(ProtectMode protectMode)
public ProtectMode getProtectMode()
public boolean isOffline()
public boolean canDrop()
public boolean canWrite()
public String getCompleteName()
public org.apache.hadoop.fs.FileStatus[] getSortedPaths()
public boolean isTemporary()
public static boolean hasMetastoreBasedSchema(HiveConf conf, StorageDescriptor serde)
public static boolean hasMetastoreBasedSchema(HiveConf conf, String serdeLib)
public static void validateColumns(List<FieldSchema> columns, List<FieldSchema> partCols) throws HiveException
public BaseSemanticAnalyzer.TableSpec getTableSpec()
public void setTableSpec(BaseSemanticAnalyzer.TableSpec tableSpec)
Copyright © 2017 The Apache Software Foundation. All rights reserved.