public class HiveTableUtil extends Object
Modifier and Type | Method and Description |
---|---|
static void |
alterColumns(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
CatalogTable catalogTable) |
static org.apache.hadoop.hive.metastore.api.Table |
alterTableViaCatalogBaseTable(ObjectPath tablePath,
CatalogBaseTable baseTable,
org.apache.hadoop.hive.metastore.api.Table oldHiveTable,
org.apache.hadoop.hive.conf.HiveConf hiveConf) |
static void |
checkAcidTable(CatalogTable catalogTable,
ObjectPath tablePath)
Checks whether to read or write on the Hive ACID table.
|
static List<org.apache.hadoop.hive.metastore.api.FieldSchema> |
createHiveColumns(TableSchema schema)
Create Hive columns from Flink TableSchema.
|
static org.apache.hadoop.hive.metastore.api.Partition |
createHivePartition(String dbName,
String tableName,
List<String> values,
org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
Map<String,String> parameters)
Creates a Hive partition instance.
|
static TableSchema |
createTableSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> cols,
List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys,
Set<String> notNullColumns,
UniqueConstraint primaryKey)
Create a Flink's TableSchema from Hive table's columns and partition keys.
|
static byte |
enableConstraint(byte trait) |
static SqlAlterHiveTable.AlterTableOp |
extractAlterTableOp(Map<String,String> props) |
static void |
extractLocation(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
Map<String,String> properties) |
static void |
extractRowFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
Map<String,String> properties) |
static Configuration |
getHadoopConfiguration(String hadoopConfDir)
Returns a new Hadoop Configuration object using the path to the hadoop conf configured.
|
static void |
initiateTableFromProperties(org.apache.hadoop.hive.metastore.api.Table hiveTable,
Map<String,String> properties,
org.apache.hadoop.hive.conf.HiveConf hiveConf)
Extract DDL semantics from properties and use it to initiate the table.
|
static org.apache.hadoop.hive.metastore.api.Table |
instantiateHiveTable(ObjectPath tablePath,
CatalogBaseTable table,
org.apache.hadoop.hive.conf.HiveConf hiveConf) |
static Optional<String> |
makePartitionFilter(int partColOffset,
List<String> partColNames,
List<Expression> expressions,
HiveShim hiveShim)
Generates a filter string for partition columns from the given filter expressions.
|
static Map<String,String> |
maskFlinkProperties(Map<String,String> properties)
Add a prefix to Flink-created properties to distinguish them from Hive-created properties.
|
static byte |
relyConstraint(byte trait) |
static boolean |
requireEnableConstraint(byte trait) |
static boolean |
requireRelyConstraint(byte trait) |
static boolean |
requireValidateConstraint(byte trait) |
static void |
setDefaultStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
org.apache.hadoop.hive.conf.HiveConf hiveConf) |
static void |
setStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
String format,
org.apache.hadoop.hive.conf.HiveConf hiveConf) |
static byte |
validateConstraint(byte trait) |
public static TableSchema createTableSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> cols, List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys, Set<String> notNullColumns, UniqueConstraint primaryKey)
public static List<org.apache.hadoop.hive.metastore.api.FieldSchema> createHiveColumns(TableSchema schema)
public static org.apache.hadoop.hive.metastore.api.Partition createHivePartition(String dbName, String tableName, List<String> values, org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> parameters)
public static byte enableConstraint(byte trait)
public static byte validateConstraint(byte trait)
public static byte relyConstraint(byte trait)
public static boolean requireEnableConstraint(byte trait)
public static boolean requireValidateConstraint(byte trait)
public static boolean requireRelyConstraint(byte trait)
public static Optional<String> makePartitionFilter(int partColOffset, List<String> partColNames, List<Expression> expressions, HiveShim hiveShim)
partColOffset - The number of non-partition columns -- used to shift field reference index
partColNames - The names of all partition columns
expressions - The filter expressions in CNF form
public static void initiateTableFromProperties(org.apache.hadoop.hive.metastore.api.Table hiveTable, Map<String,String> properties, org.apache.hadoop.hive.conf.HiveConf hiveConf)
public static void extractLocation(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties)
public static void extractRowFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties)
public static void setStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, String format, org.apache.hadoop.hive.conf.HiveConf hiveConf)
public static void setDefaultStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, org.apache.hadoop.hive.conf.HiveConf hiveConf)
public static void alterColumns(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, CatalogTable catalogTable)
public static SqlAlterHiveTable.AlterTableOp extractAlterTableOp(Map<String,String> props)
public static org.apache.hadoop.hive.metastore.api.Table alterTableViaCatalogBaseTable(ObjectPath tablePath, CatalogBaseTable baseTable, org.apache.hadoop.hive.metastore.api.Table oldHiveTable, org.apache.hadoop.hive.conf.HiveConf hiveConf)
public static org.apache.hadoop.hive.metastore.api.Table instantiateHiveTable(ObjectPath tablePath, CatalogBaseTable table, org.apache.hadoop.hive.conf.HiveConf hiveConf)
public static Map<String,String> maskFlinkProperties(Map<String,String> properties)
public static void checkAcidTable(CatalogTable catalogTable, ObjectPath tablePath)
catalogTable - Hive catalog table.
tablePath - Identifier table path.
Throws: FlinkHiveException - Thrown, if the source or sink table is transactional.
public static Configuration getHadoopConfiguration(String hadoopConfDir)
hadoopConfDir - Hadoop conf directory path.
Copyright © 2014–2021 The Apache Software Foundation. All rights reserved.