Modifier and Type | Method and Description |
---|---|
protected Row |
HBaseRowInputFormat.mapResultToOutType(org.apache.hadoop.hbase.client.Result res) |
Modifier and Type | Method and Description |
---|---|
DataSet<Row> |
HBaseTableSource.getDataSet(ExecutionEnvironment execEnv) |
TypeInformation<Row> |
HBaseRowInputFormat.getProducedType() |
TypeInformation<Row> |
HBaseTableSource.getReturnType() |
Modifier and Type | Method and Description |
---|---|
protected Row |
RowCsvInputFormat.fillRecord(Row reuse,
Object[] parsedValues) |
Modifier and Type | Method and Description |
---|---|
TypeInformation<Row> |
RowCsvInputFormat.getProducedType() |
Modifier and Type | Method and Description |
---|---|
protected Row |
RowCsvInputFormat.fillRecord(Row reuse,
Object[] parsedValues) |
Modifier and Type | Method and Description |
---|---|
Row |
JDBCInputFormat.nextRecord(Row row)
Stores the next resultSet row in a tuple.
|
Modifier and Type | Method and Description |
---|---|
org.apache.flink.table.sinks.TableSink<Row> |
JDBCAppendTableSink.configure(String[] fieldNames,
TypeInformation<?>[] fieldTypes) |
TypeInformation<Row> |
JDBCAppendTableSink.getOutputType() |
Modifier and Type | Method and Description |
---|---|
Row |
JDBCInputFormat.nextRecord(Row row)
Stores the next resultSet row in a tuple.
|
void |
JDBCOutputFormat.writeRecord(Row row)
Adds a record to the prepared statement.
|
Modifier and Type | Method and Description |
---|---|
void |
JDBCAppendTableSink.emitDataSet(DataSet<Row> dataSet) |
void |
JDBCAppendTableSink.emitDataStream(DataStream<Row> dataStream) |
Modifier and Type | Method and Description |
---|---|
TypeComparator<Row> |
RowTypeInfo.createComparator(int[] logicalKeyFields,
boolean[] orders,
int logicalFieldOffset,
ExecutionConfig config) |
TypeSerializer<Row> |
RowTypeInfo.createSerializer(ExecutionConfig config) |
protected CompositeType.TypeComparatorBuilder<Row> |
RowTypeInfo.createTypeComparatorBuilder() |
Modifier and Type | Method and Description |
---|---|
Row |
RowSerializer.copy(Row from) |
Row |
RowSerializer.copy(Row from,
Row reuse) |
Row |
RowSerializer.createInstance() |
Row |
RowSerializer.deserialize(DataInputView source) |
Row |
RowSerializer.deserialize(Row reuse,
DataInputView source) |
Row |
RowComparator.readWithKeyDenormalization(Row reuse,
DataInputView source) |
Modifier and Type | Method and Description |
---|---|
TypeSerializer<Row> |
RowSerializer.duplicate() |
TypeComparator<Row> |
RowComparator.duplicate() |
CompatibilityResult<Row> |
RowSerializer.ensureCompatibility(TypeSerializerConfigSnapshot configSnapshot) |
Modifier and Type | Method and Description |
---|---|
int |
RowComparator.compare(Row first,
Row second) |
Row |
RowSerializer.copy(Row from) |
Row |
RowSerializer.copy(Row from,
Row reuse) |
Row |
RowSerializer.deserialize(Row reuse,
DataInputView source) |
boolean |
RowComparator.equalToReference(Row candidate) |
int |
RowComparator.hash(Row record) |
void |
RowComparator.putNormalizedKey(Row record,
MemorySegment target,
int offset,
int numBytes) |
Row |
RowComparator.readWithKeyDenormalization(Row reuse,
DataInputView source) |
void |
RowSerializer.serialize(Row record,
DataOutputView target) |
void |
RowComparator.setReference(Row toCompare) |
static void |
NullMaskUtils.writeNullMask(int len,
Row value,
DataOutputView target) |
void |
RowComparator.writeWithKeyNormalization(Row record,
DataOutputView target) |
Modifier and Type | Method and Description |
---|---|
int |
RowComparator.compareToReference(TypeComparator<Row> referencedComparator) |
Modifier and Type | Method and Description |
---|---|
Row |
AvroRowDeserializationSchema.deserialize(byte[] message) |
Modifier and Type | Method and Description |
---|---|
byte[] |
AvroRowSerializationSchema.serialize(Row row) |
Modifier and Type | Method and Description |
---|---|
Row |
OrcRowInputFormat.nextRecord(Row reuse) |
Modifier and Type | Method and Description |
---|---|
org.apache.flink.table.sources.TableSource<Row> |
OrcTableSource.applyPredicate(List<org.apache.flink.table.expressions.Expression> predicates) |
DataSet<Row> |
OrcTableSource.getDataSet(ExecutionEnvironment execEnv) |
TypeInformation<Row> |
OrcRowInputFormat.getProducedType() |
TypeInformation<Row> |
OrcTableSource.getReturnType() |
org.apache.flink.table.sources.TableSource<Row> |
OrcTableSource.projectFields(int[] selectedFields) |
Modifier and Type | Method and Description |
---|---|
Row |
OrcRowInputFormat.nextRecord(Row reuse) |
Modifier and Type | Method and Description |
---|---|
protected CassandraSink<Row> |
CassandraSink.CassandraRowSinkBuilder.createSink() |
protected CassandraSink<Row> |
CassandraSink.CassandraRowSinkBuilder.createWriteAheadSink() |
TypeInformation<Row> |
CassandraAppendTableSink.getOutputType() |
Modifier and Type | Method and Description |
---|---|
protected Object[] |
CassandraRowSink.extract(Row record) |
Modifier and Type | Method and Description |
---|---|
void |
CassandraAppendTableSink.emitDataStream(DataStream<Row> dataStream) |
protected boolean |
CassandraRowWriteAheadSink.sendValues(Iterable<Row> values,
long checkpointId,
long timestamp) |
Constructor and Description |
---|
CassandraRowSinkBuilder(DataStream<Row> input,
TypeInformation<Row> typeInfo,
TypeSerializer<Row> serializer) |
CassandraRowSinkBuilder(DataStream<Row> input,
TypeInformation<Row> typeInfo,
TypeSerializer<Row> serializer) |
CassandraRowSinkBuilder(DataStream<Row> input,
TypeInformation<Row> typeInfo,
TypeSerializer<Row> serializer) |
CassandraRowWriteAheadSink(String insertQuery,
TypeSerializer<Row> serializer,
ClusterBuilder builder,
CheckpointCommitter committer) |
Modifier and Type | Field and Description |
---|---|
protected FlinkKafkaPartitioner<Row> |
KafkaTableSink.partitioner |
protected SerializationSchema<Row> |
KafkaTableSink.serializationSchema |
Modifier and Type | Method and Description |
---|---|
protected FlinkKafkaConsumerBase<Row> |
Kafka011JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka011TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka011AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected abstract FlinkKafkaConsumerBase<Row> |
KafkaTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema)
Creates a version-specific Kafka consumer.
|
protected FlinkKafkaProducerBase<Row> |
Kafka010JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka09JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka08JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected abstract FlinkKafkaProducerBase<Row> |
KafkaTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner)
Returns the version-specific Kafka producer.
|
protected abstract SerializationSchema<Row> |
KafkaTableSink.createSerializationSchema(RowTypeInfo rowSchema)
Create serialization schema for converting table rows into bytes.
|
protected SerializationSchema<Row> |
KafkaJsonTableSink.createSerializationSchema(RowTypeInfo rowSchema) |
DataStream<Row> |
KafkaTableSource.getDataStream(StreamExecutionEnvironment env)
NOTE: This method is for internal use only for defining a TableSource.
|
DeserializationSchema<Row> |
Kafka011TableSource.getDeserializationSchema() |
DeserializationSchema<Row> |
Kafka010TableSource.getDeserializationSchema() |
DeserializationSchema<Row> |
Kafka09TableSource.getDeserializationSchema() |
DeserializationSchema<Row> |
Kafka08TableSource.getDeserializationSchema() |
protected abstract DeserializationSchema<Row> |
KafkaTableSource.getDeserializationSchema()
Returns the deserialization schema.
|
protected FlinkKafkaConsumerBase<Row> |
KafkaTableSource.getKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema)
Returns a version-specific Kafka consumer with the start position configured.
|
TypeInformation<Row> |
KafkaTableSink.getOutputType() |
TypeInformation<Row> |
KafkaTableSource.getReturnType() |
Modifier and Type | Method and Description |
---|---|
protected FlinkKafkaConsumerBase<Row> |
Kafka011JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka011TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka011AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka010JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka09TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08AvroTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08TableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected FlinkKafkaConsumerBase<Row> |
Kafka08JsonTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema) |
protected abstract FlinkKafkaConsumerBase<Row> |
KafkaTableSource.createKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema)
Creates a version-specific Kafka consumer.
|
protected FlinkKafkaProducerBase<Row> |
Kafka010JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka010JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka09JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka09JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka08JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected FlinkKafkaProducerBase<Row> |
Kafka08JsonTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner) |
protected abstract FlinkKafkaProducerBase<Row> |
KafkaTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner)
Returns the version-specific Kafka producer.
|
protected abstract FlinkKafkaProducerBase<Row> |
KafkaTableSink.createKafkaProducer(String topic,
Properties properties,
SerializationSchema<Row> serializationSchema,
FlinkKafkaPartitioner<Row> partitioner)
Returns the version-specific Kafka producer.
|
void |
KafkaTableSink.emitDataStream(DataStream<Row> dataStream) |
protected FlinkKafkaConsumerBase<Row> |
KafkaTableSource.getKafkaConsumer(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema)
Returns a version-specific Kafka consumer with the start position configured.
|
Constructor and Description |
---|
Kafka010JsonTableSink(String topic,
Properties properties,
FlinkKafkaPartitioner<Row> partitioner)
Creates
KafkaTableSink to write table rows as JSON-encoded records to a Kafka 0.10
topic with custom partition assignment. |
Kafka010TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.10
StreamTableSource . |
Kafka010TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.10
StreamTableSource . |
Kafka011TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.11
StreamTableSource . |
Kafka011TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.11
StreamTableSource . |
Kafka08JsonTableSink(String topic,
Properties properties,
FlinkKafkaPartitioner<Row> partitioner)
Creates
KafkaTableSink to write table rows as JSON-encoded records to a Kafka 0.8
topic with custom partition assignment. |
Kafka08JsonTableSink(String topic,
Properties properties,
KafkaPartitioner<Row> partitioner)
Deprecated.
This is a deprecated constructor that does not correctly handle partitioning when
producing to multiple topics. Use
Kafka08JsonTableSink.Kafka08JsonTableSink(String, Properties, FlinkKafkaPartitioner) instead. |
Kafka08TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.8
StreamTableSource . |
Kafka08TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.8
StreamTableSource . |
Kafka09JsonTableSink(String topic,
Properties properties,
FlinkKafkaPartitioner<Row> partitioner)
Creates
KafkaTableSink to write table rows as JSON-encoded records to a Kafka 0.9
topic with custom partition assignment. |
Kafka09JsonTableSink(String topic,
Properties properties,
KafkaPartitioner<Row> partitioner)
Deprecated.
This is a deprecated constructor that does not correctly handle partitioning when
producing to multiple topics. Use
Kafka09JsonTableSink.Kafka09JsonTableSink(String, Properties, FlinkKafkaPartitioner) instead. |
Kafka09TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.9
StreamTableSource . |
Kafka09TableSource(String topic,
Properties properties,
DeserializationSchema<Row> deserializationSchema,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> typeInfo)
Creates a Kafka 0.9
StreamTableSource . |
KafkaJsonTableSink(String topic,
Properties properties,
FlinkKafkaPartitioner<Row> partitioner)
Creates KafkaJsonTableSink.
|
KafkaTableSink(String topic,
Properties properties,
FlinkKafkaPartitioner<Row> partitioner)
Creates KafkaTableSink.
|
KafkaTableSource(String topic,
Properties properties,
org.apache.flink.table.api.TableSchema schema,
TypeInformation<Row> returnType)
Creates a generic Kafka
StreamTableSource . |
Modifier and Type | Method and Description |
---|---|
Row |
JsonRowDeserializationSchema.deserialize(byte[] message) |
Modifier and Type | Method and Description |
---|---|
TypeInformation<Row> |
JsonRowDeserializationSchema.getProducedType() |
Modifier and Type | Method and Description |
---|---|
boolean |
JsonRowDeserializationSchema.isEndOfStream(Row nextElement) |
byte[] |
JsonRowSerializationSchema.serialize(Row row) |
Constructor and Description |
---|
JsonRowDeserializationSchema(TypeInformation<Row> typeInfo)
Creates a JSON deserialization schema for the given fields and types.
|
Modifier and Type | Method and Description |
---|---|
static Row |
Row.copy(Row row)
Creates a new Row which is copied from another row.
|
static Row |
Row.of(Object... values)
Creates a new Row and assigns the given values to the Row's fields.
|
static Row |
Row.project(Row row,
int[] fields)
Creates a new Row with projected fields from another row.
|
Modifier and Type | Method and Description |
---|---|
static Row |
Row.copy(Row row)
Creates a new Row which is copied from another row.
|
static Row |
Row.project(Row row,
int[] fields)
Creates a new Row with projected fields from another row.
|
Copyright © 2014–2018 The Apache Software Foundation. All rights reserved.