public class SparkSource extends Object implements org.apache.spark.sql.sources.DataSourceRegister, org.apache.spark.sql.connector.catalog.SessionConfigSupport
| Constructor and Description |
|---|
| `SparkSource()` |
| Modifier and Type | Method and Description |
|---|---|
| `org.apache.spark.sql.connector.catalog.Table` | `getTable(org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitioning, Map<String,String> options)` |
| `org.apache.spark.sql.connector.expressions.Transform[]` | `inferPartitioning(org.apache.spark.sql.util.CaseInsensitiveStringMap options)` |
| `org.apache.spark.sql.types.StructType` | `inferSchema(org.apache.spark.sql.util.CaseInsensitiveStringMap options)` |
| `String` | `keyPrefix()` |
| `String` | `shortName()` |
| `boolean` | `supportsExternalMetadata()` |
public String shortName()

Specified by: `shortName` in interface `org.apache.spark.sql.sources.DataSourceRegister`
public org.apache.spark.sql.types.StructType inferSchema(org.apache.spark.sql.util.CaseInsensitiveStringMap options)

Specified by: `inferSchema` in interface `org.apache.spark.sql.connector.catalog.TableProvider`
public org.apache.spark.sql.connector.expressions.Transform[] inferPartitioning(org.apache.spark.sql.util.CaseInsensitiveStringMap options)

Specified by: `inferPartitioning` in interface `org.apache.spark.sql.connector.catalog.TableProvider`
public boolean supportsExternalMetadata()

Specified by: `supportsExternalMetadata` in interface `org.apache.spark.sql.connector.catalog.TableProvider`
public org.apache.spark.sql.connector.catalog.Table getTable(org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitioning, Map<String,String> options)

Specified by: `getTable` in interface `org.apache.spark.sql.connector.catalog.TableProvider`
public String keyPrefix()

Specified by: `keyPrefix` in interface `org.apache.spark.sql.connector.catalog.SessionConfigSupport`
Copyright © 2019–2023 The Apache Software Foundation. All rights reserved.