public class HiveTableUtil extends Object
Modifier and Type | Method and Description |
---|---|
static void | alterColumns(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, ResolvedCatalogTable resolvedCatalogTable) |
static org.apache.hadoop.hive.metastore.api.Table | alterTableViaCatalogBaseTable(ObjectPath tablePath, ResolvedCatalogBaseTable baseTable, org.apache.hadoop.hive.metastore.api.Table oldHiveTable, HiveConf hiveConf, boolean managedTable) |
static void | checkAcidTable(Map<String,String> tableOptions, ObjectPath tablePath) Checks whether a read or write targets a Hive ACID table. |
static List<org.apache.hadoop.hive.metastore.api.FieldSchema> | createHiveColumns(ResolvedSchema schema) Creates Hive columns from a Flink ResolvedSchema. |
static org.apache.hadoop.hive.metastore.api.Partition | createHivePartition(String dbName, String tableName, List<String> values, org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> parameters) Creates a Hive partition instance. |
static ResolvedSchema | createResolvedSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> nonPartCols, List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys, Set<String> notNullColumns, UniqueConstraint primaryKey) Creates a Flink ResolvedSchema from a Hive table's columns and partition keys. |
static Schema | createSchema(HiveConf hiveConf, org.apache.hadoop.hive.metastore.api.Table hiveTable, HiveMetastoreClientWrapper client, HiveShim hiveShim) Creates a Flink Schema via the Hive client. |
static Schema | createSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> nonPartCols, List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys, Set<String> notNullColumns, UniqueConstraint primaryKey) Creates a Flink Schema from a Hive table's columns and partition keys. |
static byte | enableConstraint(byte trait) |
static AlterTableOp | extractAlterTableOp(Map<String,String> props) |
static void | extractLocation(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties) |
static void | extractRowFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties) |
static DataType | extractRowType(HiveConf hiveConf, org.apache.hadoop.hive.metastore.api.Table hiveTable, HiveMetastoreClientWrapper client, HiveShim hiveShim) Creates the Hive table's row type. |
static void | extractStoredAs(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties, HiveConf hiveConf) |
static org.apache.hadoop.conf.Configuration | getHadoopConfiguration(String hadoopConfDir) Returns a new Hadoop Configuration object built from the configured Hadoop conf directory. |
static List<org.apache.hadoop.hive.metastore.api.FieldSchema> | getNonPartitionFields(HiveConf hiveConf, org.apache.hadoop.hive.metastore.api.Table hiveTable, HiveShim hiveShim) |
static org.apache.hadoop.hive.metastore.api.Table | instantiateHiveTable(ObjectPath tablePath, ResolvedCatalogBaseTable table, HiveConf hiveConf, boolean managedTable) |
static Optional<String> | makePartitionFilter(int partColOffset, List<String> partColNames, List<Expression> expressions, HiveShim hiveShim) Generates a filter string for partition columns from the given filter expressions. |
static byte | relyConstraint(byte trait) |
static boolean | requireEnableConstraint(byte trait) |
static boolean | requireRelyConstraint(byte trait) |
static boolean | requireValidateConstraint(byte trait) |
static void | setDefaultStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, HiveConf hiveConf) |
static void | setDefaultStorageFormatForDirectory(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, HiveConf hiveConf) |
static void | setStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, String format, HiveConf hiveConf) |
static byte | validateConstraint(byte trait) |
public static Schema createSchema(HiveConf hiveConf, org.apache.hadoop.hive.metastore.api.Table hiveTable, HiveMetastoreClientWrapper client, HiveShim hiveShim)
public static Schema createSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> nonPartCols, List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys, Set<String> notNullColumns, @Nullable UniqueConstraint primaryKey)
public static ResolvedSchema createResolvedSchema(List<org.apache.hadoop.hive.metastore.api.FieldSchema> nonPartCols, List<org.apache.hadoop.hive.metastore.api.FieldSchema> partitionKeys, Set<String> notNullColumns, @Nullable UniqueConstraint primaryKey)
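A minimal sketch of calling the list-based overloads above; the column names and Hive type strings are made up for illustration, and no NOT NULL columns or primary key are declared (the primaryKey parameter is @Nullable).

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class SchemaConversionSketch {
    static void convert() {
        // Hypothetical Hive columns: two data columns plus one partition key.
        List<FieldSchema> nonPartCols = Arrays.asList(
                new FieldSchema("id", "bigint", "row id"),
                new FieldSchema("name", "string", null));
        List<FieldSchema> partitionKeys = Collections.singletonList(
                new FieldSchema("dt", "string", "partition date"));

        // No NOT NULL columns and no primary key in this sketch.
        Schema schema = HiveTableUtil.createSchema(
                nonPartCols, partitionKeys, Collections.emptySet(), null);
        ResolvedSchema resolvedSchema = HiveTableUtil.createResolvedSchema(
                nonPartCols, partitionKeys, Collections.emptySet(), null);
    }
}
```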
public static DataType extractRowType(HiveConf hiveConf, org.apache.hadoop.hive.metastore.api.Table hiveTable, HiveMetastoreClientWrapper client, HiveShim hiveShim)
public static List<org.apache.hadoop.hive.metastore.api.FieldSchema> createHiveColumns(ResolvedSchema schema)
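A hedged sketch of the reverse direction, converting a Flink ResolvedSchema into Hive metastore columns; the two columns are illustrative only.

```java
import java.util.List;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class HiveColumnsSketch {
    static List<FieldSchema> toHiveColumns() {
        // Hypothetical Flink schema with two physical columns.
        ResolvedSchema schema = ResolvedSchema.of(
                Column.physical("id", DataTypes.BIGINT()),
                Column.physical("name", DataTypes.STRING()));
        // Map each Flink column to a Hive FieldSchema.
        return HiveTableUtil.createHiveColumns(schema);
    }
}
```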
public static org.apache.hadoop.hive.metastore.api.Partition createHivePartition(String dbName, String tableName, List<String> values, org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> parameters)
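A sketch of building a partition instance; the database name, table name, and partition value are hypothetical, and the StorageDescriptor is assumed to be supplied by the caller (typically derived from the owning table, with its location pointing at the partition directory).

```java
import java.util.Collections;
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

public class PartitionSketch {
    static Partition newPartition(StorageDescriptor partitionSd) {
        return HiveTableUtil.createHivePartition(
                "mydb",                                   // database name (illustrative)
                "orders",                                 // table name (illustrative)
                Collections.singletonList("2024-01-01"),  // one value per partition key
                partitionSd,                              // storage descriptor for the partition
                new HashMap<>());                         // additional partition parameters
    }
}
```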
public static byte enableConstraint(byte trait)
public static byte validateConstraint(byte trait)
public static byte relyConstraint(byte trait)
public static boolean requireEnableConstraint(byte trait)
public static boolean requireValidateConstraint(byte trait)
public static boolean requireRelyConstraint(byte trait)
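The constraint trait helpers appear to treat the byte as a small bit set. The sketch below assumes each enableConstraint/relyConstraint/validateConstraint call switches the corresponding flag on and each require* call tests it.

```java
public class ConstraintTraitSketch {
    static void demo() {
        byte trait = 0;
        trait = HiveTableUtil.enableConstraint(trait); // switch on ENABLE
        trait = HiveTableUtil.relyConstraint(trait);   // switch on RELY

        boolean enabled = HiveTableUtil.requireEnableConstraint(trait);     // expected true
        boolean relied = HiveTableUtil.requireRelyConstraint(trait);        // expected true
        boolean validated = HiveTableUtil.requireValidateConstraint(trait); // expected false
    }
}
```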
public static Optional<String> makePartitionFilter(int partColOffset, List<String> partColNames, List<Expression> expressions, HiveShim hiveShim)
partColOffset - The number of non-partition columns -- used to shift the field reference index.
partColNames - The names of all partition columns.
expressions - The filter expressions in CNF form.
public static void extractLocation(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties)
public static void extractRowFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties)
public static void extractStoredAs(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, Map<String,String> properties, HiveConf hiveConf)
public static void setStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, String format, HiveConf hiveConf)
public static void setDefaultStorageFormat(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, HiveConf hiveConf)
public static void setDefaultStorageFormatForDirectory(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, HiveConf hiveConf)
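A sketch of configuring a StorageDescriptor's storage format; "orc" is assumed to be a storage format name that the given HiveConf can resolve.

```java
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

public class StorageFormatSketch {
    static void configure(StorageDescriptor sd, HiveConf hiveConf) {
        // Resolve the named format ("orc" here) into the descriptor's
        // input/output format and serde settings.
        HiveTableUtil.setStorageFormat(sd, "orc", hiveConf);

        // Alternatively, fall back to whatever default format the HiveConf declares:
        // HiveTableUtil.setDefaultStorageFormat(sd, hiveConf);
    }
}
```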
public static void alterColumns(org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, ResolvedCatalogTable resolvedCatalogTable)
public static AlterTableOp extractAlterTableOp(Map<String,String> props)
public static org.apache.hadoop.hive.metastore.api.Table alterTableViaCatalogBaseTable(ObjectPath tablePath, ResolvedCatalogBaseTable baseTable, org.apache.hadoop.hive.metastore.api.Table oldHiveTable, HiveConf hiveConf, boolean managedTable)
public static org.apache.hadoop.hive.metastore.api.Table instantiateHiveTable(ObjectPath tablePath, ResolvedCatalogBaseTable table, HiveConf hiveConf, boolean managedTable)
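A sketch of turning a resolved Flink catalog table into a Hive metastore Table; the schema, table path, empty options map, and managedTable=false are illustrative assumptions, and a real table would normally carry connector-specific options.

```java
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Table;

public class InstantiateTableSketch {
    static Table toHiveTable(HiveConf hiveConf) {
        // Hypothetical two-column schema.
        ResolvedSchema resolvedSchema = ResolvedSchema.of(
                Column.physical("id", DataTypes.BIGINT()),
                Column.physical("name", DataTypes.STRING()));
        Schema schema = Schema.newBuilder().fromResolvedSchema(resolvedSchema).build();

        Map<String, String> options = new HashMap<>(); // table options; left empty in this sketch
        CatalogTable origin =
                CatalogTable.of(schema, "demo table", Collections.emptyList(), options);
        ResolvedCatalogTable resolvedTable = new ResolvedCatalogTable(origin, resolvedSchema);

        return HiveTableUtil.instantiateHiveTable(
                new ObjectPath("mydb", "demo"), resolvedTable, hiveConf, false);
    }
}
```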
public static void checkAcidTable(Map<String,String> tableOptions, ObjectPath tablePath)
tableOptions - Hive table options.
tablePath - Identifier table path.
Throws: FlinkHiveException - Thrown if the source or sink table is transactional.
public static org.apache.hadoop.conf.Configuration getHadoopConfiguration(String hadoopConfDir)
hadoopConfDir - Hadoop conf directory path.
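A sketch combining the two calls above; the table identifier and the conf directory path are illustrative only.

```java
import java.util.Map;

import org.apache.flink.table.catalog.ObjectPath;
import org.apache.hadoop.conf.Configuration;

public class AcidAndConfSketch {
    static Configuration verifyAndLoad(Map<String, String> tableOptions) {
        // Throws FlinkHiveException if the options mark the table as transactional (ACID).
        HiveTableUtil.checkAcidTable(tableOptions, new ObjectPath("mydb", "orders"));

        // Build a Hadoop Configuration from the files under the given conf directory.
        return HiveTableUtil.getHadoopConfiguration("/etc/hadoop/conf");
    }
}
```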