Package org.apache.iceberg.spark.source
Class SparkTable
- java.lang.Object
  - org.apache.iceberg.spark.source.SparkTable
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsDelete, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table
- Direct Known Subclasses:
StagedSparkTable
public class SparkTable
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete
Constructor Summary
- SparkTable(Table icebergTable)
- SparkTable(Table icebergTable, org.apache.spark.sql.types.StructType requestedSchema)
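Example (illustrative sketch, not part of the API): constructing a SparkTable around an Iceberg table loaded with HadoopTables and inspecting its Spark-facing metadata. The warehouse path and the values noted in comments are assumptions.

import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.spark.source.SparkTable;

public class SparkTableExample {
  public static void main(String[] args) {
    // Load an Iceberg table from a file-system location (assumed path).
    Table icebergTable = new HadoopTables().load("/tmp/warehouse/db/events");

    // Wrap it so Spark's DataSource V2 machinery can read and write it.
    SparkTable sparkTable = new SparkTable(icebergTable);

    // Spark-facing metadata delegates to the underlying Iceberg table.
    System.out.println(sparkTable.name());          // table name
    System.out.println(sparkTable.schema());        // Spark StructType derived from the Iceberg schema
    System.out.println(sparkTable.properties());    // Iceberg table properties
    System.out.println(sparkTable.capabilities());  // e.g. BATCH_READ, BATCH_WRITE

    // The wrapped Iceberg table remains accessible via table().
    Table unwrapped = sparkTable.table();
    System.out.println(unwrapped.location());
  }
}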
-
Method Summary
All methods are instance, concrete methods.
- java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
- void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
- java.lang.String name()
- org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- java.util.Map<java.lang.String,java.lang.String> properties()
- org.apache.spark.sql.types.StructType schema()
- Table table()
- java.lang.String toString()
Method Detail
-
table
public Table table()
-
name
public java.lang.String name()
- Specified by: name in interface org.apache.spark.sql.connector.catalog.Table
-
schema
public org.apache.spark.sql.types.StructType schema()
- Specified by: schema in interface org.apache.spark.sql.connector.catalog.Table
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- Specified by: partitioning in interface org.apache.spark.sql.connector.catalog.Table
-
properties
public java.util.Map<java.lang.String,java.lang.String> properties()
- Specified by: properties in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
- Specified by: capabilities in interface org.apache.spark.sql.connector.catalog.Table
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- Specified by: newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
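Example (illustrative sketch): Spark calls newScanBuilder when it plans a read, and the read options set by the user arrive here as the CaseInsensitiveStringMap. "snapshot-id" is a standard Iceberg read option; the snapshot id, table path, and local master below are assumptions.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class ScanExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("iceberg-scan")
        .master("local[*]")
        .getOrCreate();

    // Read options set here reach SparkTable.newScanBuilder(options)
    // as a CaseInsensitiveStringMap.
    Dataset<Row> df = spark.read()
        .format("iceberg")
        .option("snapshot-id", "10963874102873")  // time-travel to a specific snapshot (assumed id)
        .load("/tmp/warehouse/db/events");        // assumed table location

    df.show();
  }
}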
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- Specified by: newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
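Example (illustrative sketch): Spark calls newWriteBuilder with the LogicalWriteInfo for a planned write. A DataFrame append through the "iceberg" format exercises this path; the staging path and table location are assumptions.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class WriteExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("iceberg-write")
        .master("local[*]")
        .getOrCreate();

    // Source data to append (assumed staging location).
    Dataset<Row> updates = spark.read().parquet("/tmp/staging/events");

    // An append through the DataSource V2 path; Spark obtains a WriteBuilder
    // from SparkTable.newWriteBuilder(info) to plan this write.
    updates.write()
        .format("iceberg")
        .mode(SaveMode.Append)
        .save("/tmp/warehouse/db/events");  // assumed table location
  }
}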
-
deleteWhere
public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
- Specified by: deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
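Example (illustrative sketch): deleteWhere receives the Filter[] that Spark pushes down for a DELETE it can express entirely with source filters. Calling it directly, as below, is unusual but shows the contract; in normal use Spark builds the filters itself when planning a SQL DELETE. The table location and column name are assumptions.

import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.LessThan;

public class DeleteExample {
  public static void main(String[] args) {
    // A Spark session is assumed to be available to the table operations.
    SparkSession spark = SparkSession.builder()
        .appName("iceberg-delete")
        .master("local[*]")
        .getOrCreate();

    Table icebergTable = new HadoopTables().load("/tmp/warehouse/db/events");  // assumed location
    SparkTable sparkTable = new SparkTable(icebergTable);

    // Delete all rows matching the pushed-down source filters.
    Filter[] filters = new Filter[] { new LessThan("event_date", "2020-01-01") };  // assumed column
    sparkTable.deleteWhere(filters);
  }
}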
-
toString
public java.lang.String toString()
- Overrides: toString in class java.lang.Object