Package org.apache.iceberg.spark.source
Class SparkTable
- java.lang.Object
-
- org.apache.iceberg.spark.source.SparkTable
-
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsDelete
, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
, org.apache.spark.sql.connector.catalog.SupportsRead
, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
, org.apache.spark.sql.connector.catalog.SupportsWrite
, org.apache.spark.sql.connector.catalog.Table
, org.apache.spark.sql.connector.catalog.TruncatableTable
- Direct Known Subclasses:
StagedSparkTable
public class SparkTable extends java.lang.Object implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
-
-
Constructor Summary
Constructors Constructor Description SparkTable(Table icebergTable, boolean refreshEagerly)
SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly)
SparkTable(Table icebergTable, java.lang.String branch, boolean refreshEagerly)
-
Method Summary
All Methods Instance Methods Concrete Methods Modifier and Type Method Description boolean
canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability>
capabilities()
SparkTable
copyWithBranch(java.lang.String targetBranch)
SparkTable
copyWithSnapshotId(long newSnapshotId)
void
deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
boolean
equals(java.lang.Object other)
int
hashCode()
org.apache.spark.sql.connector.catalog.MetadataColumn[]
metadataColumns()
java.lang.String
name()
org.apache.spark.sql.connector.write.RowLevelOperationBuilder
newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
org.apache.spark.sql.connector.read.ScanBuilder
newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
org.apache.spark.sql.connector.write.WriteBuilder
newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
org.apache.spark.sql.connector.expressions.Transform[]
partitioning()
java.util.Map<java.lang.String,java.lang.String>
properties()
org.apache.spark.sql.types.StructType
schema()
java.lang.Long
snapshotId()
Table
table()
java.lang.String
toString()
-
-
-
Method Detail
-
table
public Table table()
-
name
public java.lang.String name()
- Specified by:
name
in interface org.apache.spark.sql.connector.catalog.Table
-
snapshotId
public java.lang.Long snapshotId()
-
copyWithSnapshotId
public SparkTable copyWithSnapshotId(long newSnapshotId)
-
copyWithBranch
public SparkTable copyWithBranch(java.lang.String targetBranch)
-
schema
public org.apache.spark.sql.types.StructType schema()
- Specified by:
schema
in interface org.apache.spark.sql.connector.catalog.Table
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- Specified by:
partitioning
in interface org.apache.spark.sql.connector.catalog.Table
-
properties
public java.util.Map<java.lang.String,java.lang.String> properties()
- Specified by:
properties
in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
- Specified by:
capabilities
in interface org.apache.spark.sql.connector.catalog.Table
-
metadataColumns
public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
- Specified by:
metadataColumns
in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- Specified by:
newScanBuilder
in interface org.apache.spark.sql.connector.catalog.SupportsRead
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- Specified by:
newWriteBuilder
in interface org.apache.spark.sql.connector.catalog.SupportsWrite
-
newRowLevelOperationBuilder
public org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
- Specified by:
newRowLevelOperationBuilder
in interface org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
-
canDeleteWhere
public boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
- Specified by:
canDeleteWhere
in interface org.apache.spark.sql.connector.catalog.SupportsDelete
-
deleteWhere
public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
- Specified by:
deleteWhere
in interface org.apache.spark.sql.connector.catalog.SupportsDelete
-
toString
public java.lang.String toString()
- Overrides:
toString
in class java.lang.Object
-
equals
public boolean equals(java.lang.Object other)
- Overrides:
equals
in class java.lang.Object
-
hashCode
public int hashCode()
- Overrides:
hashCode
in class java.lang.Object
-
-