public class SparkScan extends Object implements org.apache.spark.sql.connector.read.Scan, org.apache.spark.sql.connector.read.SupportsReportStatistics
A Scan implementation for table store.
TODO: Introduce a SparkRFScan to implement SupportsRuntimeFiltering.
| Constructor and Description |
|---|
SparkScan(Table table,
List<Predicate> predicates,
int[] projectedFields,
org.apache.flink.configuration.Configuration conf) |
| Modifier and Type | Method and Description |
|---|---|
String |
description() |
boolean |
equals(Object o) |
org.apache.spark.sql.connector.read.Statistics |
estimateStatistics() |
int |
hashCode() |
org.apache.spark.sql.types.StructType |
readSchema() |
protected List<Split> |
splits() |
org.apache.spark.sql.connector.read.Batch |
toBatch() |
protected final Table table
public String description()
description
in interface org.apache.spark.sql.connector.read.Scan
public org.apache.spark.sql.types.StructType readSchema()
readSchema
in interface org.apache.spark.sql.connector.read.Scan
public org.apache.spark.sql.connector.read.Batch toBatch()
toBatch
in interface org.apache.spark.sql.connector.read.Scan
public org.apache.spark.sql.connector.read.Statistics estimateStatistics()
estimateStatistics
in interface org.apache.spark.sql.connector.read.SupportsReportStatistics
Copyright © 2019–2023 The Apache Software Foundation. All rights reserved.