public class SparkInternalRow
extends org.apache.spark.sql.catalyst.InternalRow
An `InternalRow` implementation used to wrap a Flink `RowData`.
| Constructor and Description |
| --- |
| `SparkInternalRow(org.apache.flink.table.types.logical.RowType rowType)` |
| Modifier and Type | Method and Description |
| --- | --- |
| org.apache.spark.sql.catalyst.InternalRow | copy() |
| static org.apache.spark.sql.catalyst.util.ArrayData | fromFlink(org.apache.flink.table.data.ArrayData array, org.apache.flink.table.types.logical.ArrayType arrayType) |
| static org.apache.spark.sql.types.Decimal | fromFlink(org.apache.flink.table.data.DecimalData decimal) |
| static org.apache.spark.sql.catalyst.util.MapData | fromFlink(org.apache.flink.table.data.MapData map, org.apache.flink.table.types.logical.LogicalType mapType) |
| static Object | fromFlink(Object o, org.apache.flink.table.types.logical.LogicalType type) |
| static org.apache.spark.sql.catalyst.InternalRow | fromFlink(org.apache.flink.table.data.RowData row, org.apache.flink.table.types.logical.RowType rowType) |
| static org.apache.spark.unsafe.types.UTF8String | fromFlink(org.apache.flink.table.data.StringData string) |
| static long | fromFlink(org.apache.flink.table.data.TimestampData timestamp) |
| Object | get(int ordinal, org.apache.spark.sql.types.DataType dataType) |
| org.apache.spark.sql.catalyst.util.ArrayData | getArray(int ordinal) |
| byte[] | getBinary(int ordinal) |
| boolean | getBoolean(int ordinal) |
| byte | getByte(int ordinal) |
| org.apache.spark.sql.types.Decimal | getDecimal(int ordinal, int precision, int scale) |
| double | getDouble(int ordinal) |
| float | getFloat(int ordinal) |
| int | getInt(int ordinal) |
| org.apache.spark.unsafe.types.CalendarInterval | getInterval(int ordinal) |
| long | getLong(int ordinal) |
| org.apache.spark.sql.catalyst.util.MapData | getMap(int ordinal) |
| short | getShort(int ordinal) |
| org.apache.spark.sql.catalyst.InternalRow | getStruct(int ordinal, int numFields) |
| org.apache.spark.unsafe.types.UTF8String | getUTF8String(int ordinal) |
| boolean | isNullAt(int ordinal) |
| int | numFields() |
| SparkInternalRow | replace(org.apache.flink.table.data.RowData row) |
| void | setNullAt(int i) |
| void | update(int i, Object value) |
public SparkInternalRow(org.apache.flink.table.types.logical.RowType rowType)
public SparkInternalRow replace(org.apache.flink.table.data.RowData row)
public int numFields()
Overrides: numFields in class org.apache.spark.sql.catalyst.InternalRow
public void setNullAt(int i)
Overrides: setNullAt in class org.apache.spark.sql.catalyst.InternalRow
public void update(int i, Object value)
Overrides: update in class org.apache.spark.sql.catalyst.InternalRow
public org.apache.spark.sql.catalyst.InternalRow copy()
Overrides: copy in class org.apache.spark.sql.catalyst.InternalRow
public boolean isNullAt(int ordinal)
public boolean getBoolean(int ordinal)
public byte getByte(int ordinal)
public short getShort(int ordinal)
public int getInt(int ordinal)
public long getLong(int ordinal)
public float getFloat(int ordinal)
public double getDouble(int ordinal)
public org.apache.spark.sql.types.Decimal getDecimal(int ordinal, int precision, int scale)
public org.apache.spark.unsafe.types.UTF8String getUTF8String(int ordinal)
public byte[] getBinary(int ordinal)
public org.apache.spark.unsafe.types.CalendarInterval getInterval(int ordinal)
public org.apache.spark.sql.catalyst.InternalRow getStruct(int ordinal, int numFields)
public org.apache.spark.sql.catalyst.util.ArrayData getArray(int ordinal)
public org.apache.spark.sql.catalyst.util.MapData getMap(int ordinal)
public Object get(int ordinal, org.apache.spark.sql.types.DataType dataType)
public static Object fromFlink(Object o, org.apache.flink.table.types.logical.LogicalType type)
public static org.apache.spark.unsafe.types.UTF8String fromFlink(org.apache.flink.table.data.StringData string)
public static org.apache.spark.sql.types.Decimal fromFlink(org.apache.flink.table.data.DecimalData decimal)
public static org.apache.spark.sql.catalyst.InternalRow fromFlink(org.apache.flink.table.data.RowData row, org.apache.flink.table.types.logical.RowType rowType)
public static long fromFlink(org.apache.flink.table.data.TimestampData timestamp)
public static org.apache.spark.sql.catalyst.util.ArrayData fromFlink(org.apache.flink.table.data.ArrayData array, org.apache.flink.table.types.logical.ArrayType arrayType)
public static org.apache.spark.sql.catalyst.util.MapData fromFlink(org.apache.flink.table.data.MapData map, org.apache.flink.table.types.logical.LogicalType mapType)
Copyright © 2019–2023 The Apache Software Foundation. All rights reserved.