public class HiveShimV100 extends Object implements HiveShim
| Modifier and Type | Field and Description |
|---|---|
| protected boolean | holdDDLTime |
| protected boolean | inheritTableSpecs |
| protected boolean | isAcid |
| protected boolean | isSkewedStoreAsSubdir |
| Constructor and Description |
|---|
| HiveShimV100() |
| Modifier and Type | Method and Description |
|---|---|
| void | alterPartition(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Partition partition) |
| void | alterTable(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Table table) Alters a Hive table. |
| BulkWriter.Factory<RowData> | createOrcBulkWriterFactory(org.apache.hadoop.conf.Configuration conf, String schema, LogicalType[] fieldTypes) Creates an ORC BulkWriter.Factory for different Hive versions. |
| void | createTableWithConstraints(org.apache.hadoop.hive.metastore.IMetaStoreClient client, org.apache.hadoop.hive.metastore.api.Table table, org.apache.hadoop.conf.Configuration conf, UniqueConstraint pk, List<Byte> pkTraits, List<String> notNullCols, List<Byte> nnTraits) Creates a table with PK and NOT NULL constraints. |
| org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo | createUDAFParameterInfo(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector[] params, boolean isWindowing, boolean distinct, boolean allColumns) Creates SimpleGenericUDAFParameterInfo. |
| Optional<org.apache.hadoop.hive.ql.exec.FunctionInfo> | getBuiltInFunctionInfo(String name) Gets a Hive built-in function by name. |
| Class<?> | getDateDataTypeClass() Gets Hive's Date data type class, which was changed in Hive 3.1.0. |
| Class<?> | getDateWritableClass() Gets the writable class for the Date type. |
| List<org.apache.hadoop.hive.metastore.api.FieldSchema> | getFieldsFromDeserializer(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.hive.metastore.api.Table table, boolean skipConfError) Gets the Hive table schema from the deserializer. |
| org.apache.hadoop.hive.metastore.IMetaStoreClient | getHiveMetastoreClient(HiveConf hiveConf) Creates a Hive Metastore client based on the given HiveConf object. |
| Class<?> | getHiveMetaStoreUtilsClass() Gets the class of Hive's HiveMetaStoreUtils, which was split from the MetaStoreUtils class in Hive 3.1.0. |
| Class | getHiveOutputFormatClass(Class outputFormatClz) For a given OutputFormat class, gets the corresponding HiveOutputFormat class. |
| org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter | getHiveRecordWriter(org.apache.hadoop.mapred.JobConf jobConf, Class outputFormatClz, Class<? extends org.apache.hadoop.io.Writable> outValClz, boolean isCompressed, Properties tableProps, org.apache.hadoop.fs.Path outPath) Gets Hive's FileSinkOperator.RecordWriter. |
| Class<?> | getMetaStoreUtilsClass() Gets the class of Hive's MetaStoreUtils, because its package name was changed in Hive 3.1.0. |
| Set<String> | getNotNullColumns(org.apache.hadoop.hive.metastore.IMetaStoreClient client, org.apache.hadoop.conf.Configuration conf, String dbName, String tableName) Gets the set of columns that have NOT NULL constraints. |
| Optional<UniqueConstraint> | getPrimaryKey(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String dbName, String tableName, byte requiredTrait) Gets the primary key of a Hive table and converts it to a UniqueConstraint. |
| Class<?> | getTimestampDataTypeClass() Gets Hive's Timestamp data type class, which was changed in Hive 3.1.0. |
| Class<?> | getTimestampWritableClass() Gets the writable class for the Timestamp type. |
| List<String> | getViews(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName) Gets a list of views in the given database from the given Hive Metastore client. |
| org.apache.hadoop.io.Writable | hivePrimitiveToWritable(Object value) Converts a Hive primitive Java object to the corresponding Writable object. |
| boolean | isDateStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData colStatsData) Whether a Hive ColumnStatisticsData is for DATE columns. |
| Set<String> | listBuiltInFunctions() Lists the names of all built-in functions. |
| void | loadPartition(org.apache.hadoop.hive.ql.metadata.Hive hive, org.apache.hadoop.fs.Path loadPath, String tableName, Map<String,String> partSpec, boolean isSkewedStoreAsSubdir, boolean replace, boolean isSrcLocal) |
| void | loadTable(org.apache.hadoop.hive.ql.metadata.Hive hive, org.apache.hadoop.fs.Path loadPath, String tableName, boolean replace, boolean isSrcLocal) |
| void | registerTemporaryFunction(String funcName, Class funcClass) |
| java.time.LocalDate | toFlinkDate(Object hiveDate) Converts a Hive date instance to the LocalDate expected by DataFormatConverter. |
| CatalogColumnStatisticsDataDate | toFlinkDateColStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData hiveDateColStats) Generates Flink CatalogColumnStatisticsDataDate from Hive ColumnStatisticsData for DATE columns. |
| java.time.LocalDateTime | toFlinkTimestamp(Object hiveTimestamp) Converts a Hive timestamp instance to the LocalDateTime expected by DataFormatConverter. |
| Object | toHiveDate(Object flinkDate) Converts a Flink date instance to what's expected by Hive. |
| org.apache.hadoop.hive.metastore.api.ColumnStatisticsData | toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats) Generates Hive ColumnStatisticsData from Flink CatalogColumnStatisticsDataDate for DATE columns. |
| Object | toHiveTimestamp(Object flinkTimestamp) Converts a Flink timestamp instance to what's expected by Hive. |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface HiveShim: getIntervalDayTimeTypeInfo, getIntervalYearMonthTypeInfo, isIntervalDayTimeType, isIntervalYearMonthType, isMaterializedView
protected final boolean holdDDLTime
protected final boolean isAcid
protected final boolean inheritTableSpecs
protected final boolean isSkewedStoreAsSubdir
public org.apache.hadoop.hive.metastore.IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf)

Description copied from interface: HiveShim
Creates a Hive Metastore client based on the given HiveConf object.

Specified by: getHiveMetastoreClient in interface HiveShim
Parameters:
hiveConf - HiveConf instance

public List<String> getViews(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName) throws org.apache.hadoop.hive.metastore.api.UnknownDBException, org.apache.thrift.TException

Description copied from interface: HiveShim
Gets a list of views in the given database from the given Hive Metastore client.

Specified by: getViews in interface HiveShim
Parameters:
client - Hive Metastore client
databaseName - the name of the database
Throws:
org.apache.hadoop.hive.metastore.api.UnknownDBException - if the database doesn't exist
org.apache.thrift.TException - for any other generic exceptions caused by Thrift

public void alterTable(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Table table) throws org.apache.hadoop.hive.metastore.api.InvalidOperationException, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.thrift.TException

Description copied from interface: HiveShim
Alters a Hive table.

Specified by: alterTable in interface HiveShim
Parameters:
client - the Hive metastore client
databaseName - the name of the database to which the table belongs
tableName - the name of the table to be altered
table - the new Hive table
Throws:
org.apache.hadoop.hive.metastore.api.InvalidOperationException
org.apache.hadoop.hive.metastore.api.MetaException
org.apache.thrift.TException
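The following sketch is not part of the generated Javadoc; it shows how these metastore-facing methods might be combined. It assumes a Hive 1.0.x metastore reachable through a hive-site.xml on the classpath, that HiveShim and HiveShimV100 live in the Flink Hive connector package org.apache.flink.table.catalog.hive.client, and uses the database name "default" purely as a placeholder.

```java
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;

import java.util.List;

public class HiveShimV100ClientSketch {
    public static void main(String[] args) throws Exception {
        HiveShim shim = new HiveShimV100();   // shim matching the Hive 1.0.x client on the classpath
        HiveConf hiveConf = new HiveConf();   // picks up hive-site.xml from the classpath
        IMetaStoreClient client = shim.getHiveMetastoreClient(hiveConf);
        try {
            // List the views of a database through the version-specific shim.
            List<String> views = shim.getViews(client, "default");
            views.forEach(System.out::println);
        } finally {
            client.close();
        }
    }
}
```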
public void alterPartition(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Partition partition) throws org.apache.hadoop.hive.metastore.api.InvalidOperationException, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.thrift.TException

Specified by: alterPartition in interface HiveShim
Throws:
org.apache.hadoop.hive.metastore.api.InvalidOperationException
org.apache.hadoop.hive.metastore.api.MetaException
org.apache.thrift.TException

public org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo createUDAFParameterInfo(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector[] params, boolean isWindowing, boolean distinct, boolean allColumns)

Description copied from interface: HiveShim
Creates SimpleGenericUDAFParameterInfo.

Specified by: createUDAFParameterInfo in interface HiveShim

public Class<?> getMetaStoreUtilsClass()

Description copied from interface: HiveShim
Gets the class of Hive's MetaStoreUtils, because its package name was changed in Hive 3.1.0.

Specified by: getMetaStoreUtilsClass in interface HiveShim

public Class<?> getHiveMetaStoreUtilsClass()

Description copied from interface: HiveShim
Gets the class of Hive's HiveMetaStoreUtils, which was split from the MetaStoreUtils class in Hive 3.1.0.

Specified by: getHiveMetaStoreUtilsClass in interface HiveShim

public Class<?> getDateDataTypeClass()

Description copied from interface: HiveShim
Gets Hive's Date data type class, which was changed in Hive 3.1.0.

Specified by: getDateDataTypeClass in interface HiveShim

public Class<?> getDateWritableClass()

Description copied from interface: HiveShim
Gets the writable class for the Date type.

Specified by: getDateWritableClass in interface HiveShim

public Class<?> getTimestampDataTypeClass()

Description copied from interface: HiveShim
Gets Hive's Timestamp data type class, which was changed in Hive 3.1.0.

Specified by: getTimestampDataTypeClass in interface HiveShim

public Class<?> getTimestampWritableClass()

Description copied from interface: HiveShim
Gets the writable class for the Timestamp type.

Specified by: getTimestampWritableClass in interface HiveShim
public org.apache.hadoop.hive.metastore.api.ColumnStatisticsData toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats)

Description copied from interface: HiveShim
Generates Hive ColumnStatisticsData from Flink CatalogColumnStatisticsDataDate for DATE columns.

Specified by: toHiveDateColStats in interface HiveShim

public boolean isDateStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData colStatsData)

Description copied from interface: HiveShim
Whether a Hive ColumnStatisticsData is for DATE columns.

Specified by: isDateStats in interface HiveShim

public CatalogColumnStatisticsDataDate toFlinkDateColStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData hiveDateColStats)

Description copied from interface: HiveShim
Generates Flink CatalogColumnStatisticsDataDate from Hive ColumnStatisticsData for DATE columns.

Specified by: toFlinkDateColStats in interface HiveShim
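A hedged sketch of how these statistics helpers are typically combined: column statistics are fetched from the metastore (the table default.orders and column "dt" are hypothetical), and isDateStats guards the conversion. On a Hive 1.0.x shim the DATE branch may simply never be taken, since such old metastores predate DATE column statistics.

```java
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

import java.util.Collections;
import java.util.List;

public class DateColStatsSketch {
    // Converts any DATE column statistics of default.orders.dt to Flink's representation.
    static void printDateStats(HiveShim shim, IMetaStoreClient client) throws Exception {
        List<ColumnStatisticsObj> stats =
                client.getTableColumnStatistics("default", "orders", Collections.singletonList("dt"));
        for (ColumnStatisticsObj obj : stats) {
            ColumnStatisticsData data = obj.getStatsData();
            if (shim.isDateStats(data)) {
                CatalogColumnStatisticsDataDate flinkStats = shim.toFlinkDateColStats(data);
                System.out.println(flinkStats);
            }
        }
    }
}
```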
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(org.apache.hadoop.mapred.JobConf jobConf, Class outputFormatClz, Class<? extends org.apache.hadoop.io.Writable> outValClz, boolean isCompressed, Properties tableProps, org.apache.hadoop.fs.Path outPath)

Description copied from interface: HiveShim
Gets Hive's FileSinkOperator.RecordWriter.

Specified by: getHiveRecordWriter in interface HiveShim

public Class getHiveOutputFormatClass(Class outputFormatClz)

Description copied from interface: HiveShim
For a given OutputFormat class, gets the corresponding HiveOutputFormat class.

Specified by: getHiveOutputFormatClass in interface HiveShim

public List<org.apache.hadoop.hive.metastore.api.FieldSchema> getFieldsFromDeserializer(org.apache.hadoop.conf.Configuration conf, org.apache.hadoop.hive.metastore.api.Table table, boolean skipConfError)

Description copied from interface: HiveShim
Gets the Hive table schema from the deserializer.

Specified by: getFieldsFromDeserializer in interface HiveShim

public Set<String> listBuiltInFunctions()

Description copied from interface: HiveShim
Lists the names of all built-in functions.

Specified by: listBuiltInFunctions in interface HiveShim

public Optional<org.apache.hadoop.hive.ql.exec.FunctionInfo> getBuiltInFunctionInfo(String name)

Description copied from interface: HiveShim
Gets a Hive built-in function by name.

Specified by: getBuiltInFunctionInfo in interface HiveShim
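A small usage sketch (not from the original Javadoc): looking up one of Hive's built-in functions through the shim. The function name "concat" is just an example, and listBuiltInFunctions may not be usable on very old Hive versions, so only the by-name lookup is shown.

```java
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;

public class BuiltInFunctionSketch {
    public static void main(String[] args) {
        HiveShim shim = new HiveShimV100();
        // Resolve a built-in function; the Optional is empty if Hive doesn't know the name.
        shim.getBuiltInFunctionInfo("concat")
                .ifPresent(info -> System.out.println("Found built-in: " + info.getDisplayName()));
    }
}
```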
public Set<String> getNotNullColumns(org.apache.hadoop.hive.metastore.IMetaStoreClient client, org.apache.hadoop.conf.Configuration conf, String dbName, String tableName)

Description copied from interface: HiveShim
Gets the set of columns that have NOT NULL constraints.

Specified by: getNotNullColumns in interface HiveShim

public Optional<UniqueConstraint> getPrimaryKey(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String dbName, String tableName, byte requiredTrait)

Description copied from interface: HiveShim
Gets the primary key of a Hive table and converts it to a UniqueConstraint.

Specified by: getPrimaryKey in interface HiveShim
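For illustration (database, table name, and trait byte are placeholders), constraint metadata can be read back through the shim. Since Hive 1.0.x predates PRIMARY KEY and NOT NULL constraints, the results of these calls may simply be empty for this shim.

```java
import org.apache.flink.table.catalog.hive.client.HiveShim;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;

import java.util.Set;

public class ConstraintLookupSketch {
    static void printConstraints(HiveShim shim, IMetaStoreClient client, Configuration conf) {
        Set<String> notNullCols = shim.getNotNullColumns(client, conf, "default", "orders");
        System.out.println("NOT NULL columns: " + notNullCols);
        // The trait byte encodes which constraint traits are required; 0 is used here purely
        // as a placeholder value.
        shim.getPrimaryKey(client, "default", "orders", (byte) 0)
                .ifPresent(pk -> System.out.println("Primary key: " + pk));
    }
}
```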
public Object toHiveTimestamp(Object flinkTimestamp)

Description copied from interface: HiveShim
Converts a Flink timestamp instance to what's expected by Hive.

Specified by: toHiveTimestamp in interface HiveShim

public java.time.LocalDateTime toFlinkTimestamp(Object hiveTimestamp)

Description copied from interface: HiveShim
Converts a Hive timestamp instance to the LocalDateTime expected by DataFormatConverter.

Specified by: toFlinkTimestamp in interface HiveShim

public Object toHiveDate(Object flinkDate)

Description copied from interface: HiveShim
Converts a Flink date instance to what's expected by Hive.

Specified by: toHiveDate in interface HiveShim

public java.time.LocalDate toFlinkDate(Object hiveDate)

Description copied from interface: HiveShim
Converts a Hive date instance to the LocalDate expected by DataFormatConverter.

Specified by: toFlinkDate in interface HiveShim
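A round-trip sketch for the conversion helpers. It assumes, which is reasonable for a pre-3.1.0 shim but still an assumption of this example, that LocalDateTime/LocalDate inputs are accepted on the Flink side and that Hive-side objects convert back losslessly.

```java
import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;

import java.time.LocalDate;
import java.time.LocalDateTime;

public class DateTimeConversionSketch {
    public static void main(String[] args) {
        HiveShim shim = new HiveShimV100();

        // Flink -> Hive -> Flink round trip for timestamps.
        Object hiveTs = shim.toHiveTimestamp(LocalDateTime.of(2024, 1, 1, 12, 30));
        LocalDateTime backTs = shim.toFlinkTimestamp(hiveTs);
        System.out.println(backTs);

        // Flink -> Hive -> Flink round trip for dates.
        Object hiveDate = shim.toHiveDate(LocalDate.of(2024, 1, 1));
        LocalDate backDate = shim.toFlinkDate(hiveDate);
        System.out.println(backDate);
    }
}
```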
public org.apache.hadoop.io.Writable hivePrimitiveToWritable(Object value)

Description copied from interface: HiveShim
Converts a Hive primitive Java object to the corresponding Writable object.

Specified by: hivePrimitiveToWritable in interface HiveShim

public void createTableWithConstraints(org.apache.hadoop.hive.metastore.IMetaStoreClient client, org.apache.hadoop.hive.metastore.api.Table table, org.apache.hadoop.conf.Configuration conf, UniqueConstraint pk, List<Byte> pkTraits, List<String> notNullCols, List<Byte> nnTraits)

Description copied from interface: HiveShim
Creates a table with PK and NOT NULL constraints.

Specified by: createTableWithConstraints in interface HiveShim

public BulkWriter.Factory<RowData> createOrcBulkWriterFactory(org.apache.hadoop.conf.Configuration conf, String schema, LogicalType[] fieldTypes)

Description copied from interface: HiveShim
Creates an ORC BulkWriter.Factory for different Hive versions.

Specified by: createOrcBulkWriterFactory in interface HiveShim
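A sketch of creating the ORC writer factory. The schema string is assumed to be an ORC TypeDescription-style struct definition matching the logical types; that format, like the field names, is an assumption of this example rather than something stated by the Javadoc.

```java
import org.apache.flink.api.common.serialization.BulkWriter;
import org.apache.flink.table.catalog.hive.client.HiveShimV100;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.VarCharType;

import org.apache.hadoop.conf.Configuration;

public class OrcWriterFactorySketch {
    public static void main(String[] args) {
        LogicalType[] fieldTypes = {new IntType(), new VarCharType(VarCharType.MAX_LENGTH)};
        // Assumed ORC struct schema matching the two logical types above.
        String schema = "struct<id:int,name:string>";
        BulkWriter.Factory<RowData> factory =
                new HiveShimV100().createOrcBulkWriterFactory(new Configuration(), schema, fieldTypes);
        // The factory can then be handed to a file sink that writes RowData as ORC.
        System.out.println(factory.getClass().getName());
    }
}
```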
public void registerTemporaryFunction(String funcName, Class funcClass)

Specified by: registerTemporaryFunction in interface HiveShim
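An illustrative sketch: a Hive UDF class can be registered under a temporary name through the shim. The function name "my_lower" is a placeholder, and GenericUDFLower is used only because it ships with Hive.

```java
import org.apache.flink.table.catalog.hive.client.HiveShimV100;

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower;

public class RegisterFunctionSketch {
    public static void main(String[] args) {
        // Registers Hive's built-in lower() implementation under a temporary alias.
        new HiveShimV100().registerTemporaryFunction("my_lower", GenericUDFLower.class);
    }
}
```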
public void loadPartition(org.apache.hadoop.hive.ql.metadata.Hive hive, org.apache.hadoop.fs.Path loadPath, String tableName, Map<String,String> partSpec, boolean isSkewedStoreAsSubdir, boolean replace, boolean isSrcLocal)

Specified by: loadPartition in interface HiveShim