Package org.apache.iceberg.spark.source
Class SparkTable
java.lang.Object
org.apache.iceberg.spark.source.SparkTable
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsDeleteV2,
org.apache.spark.sql.connector.catalog.SupportsMetadataColumns,
org.apache.spark.sql.connector.catalog.SupportsRead,
org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations,
org.apache.spark.sql.connector.catalog.SupportsWrite,
org.apache.spark.sql.connector.catalog.Table,
org.apache.spark.sql.connector.catalog.TruncatableTable
- Direct Known Subclasses:
StagedSparkTable
public class SparkTable
extends Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDeleteV2, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
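In normal use a SparkTable is not constructed directly: Spark's Iceberg catalog returns one when it resolves a table identifier, and the SupportsRead/SupportsWrite hooks described below are then driven by query planning. A minimal sketch of that flow, assuming the iceberg-spark runtime is on the classpath; the catalog name "local", the warehouse path, and the table name are illustrative assumptions:

    import org.apache.spark.sql.SparkSession;

    public class SparkTableUsageSketch {
      public static void main(String[] args) {
        // Catalog name "local" and the warehouse path are illustrative assumptions.
        SparkSession spark = SparkSession.builder()
            .master("local[*]")
            .config("spark.sql.extensions",
                "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
            .config("spark.sql.catalog.local", "org.apache.iceberg.spark.SparkCatalog")
            .config("spark.sql.catalog.local.type", "hadoop")
            .config("spark.sql.catalog.local.warehouse", "/tmp/warehouse")
            .getOrCreate();

        // Resolving local.db.events yields a SparkTable; the statements below are
        // planned through its SupportsRead / SupportsWrite implementations.
        spark.sql("CREATE TABLE IF NOT EXISTS local.db.events (id BIGINT, data STRING) USING iceberg");
        spark.sql("INSERT INTO local.db.events VALUES (1L, 'a')");
        spark.sql("SELECT * FROM local.db.events").show();
      }
    }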
-
Constructor Summary
SparkTable(Table icebergTable, boolean refreshEagerly)
SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly)
SparkTable(Table icebergTable, String branch, boolean refreshEagerly)
-
Method Summary
String branch()
boolean canDeleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
SparkTable copyWithBranch(String targetBranch)
SparkTable copyWithSnapshotId(long newSnapshotId)
void deleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
boolean equals(Object other)
int hashCode()
org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
String name()
org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Map<String,String> properties()
org.apache.spark.sql.types.StructType schema()
Long snapshotId()
Table table()
String toString()
Methods inherited from class java.lang.Object
clone, finalize, getClass, notify, notifyAll, wait, wait, wait
Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
truncateTable
Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
canRenameConflictingMetadataColumns
Methods inherited from interface org.apache.spark.sql.connector.catalog.Table
columns
-
Constructor Details
-
SparkTable
public SparkTable(Table icebergTable, boolean refreshEagerly)
-
SparkTable
public SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly)
-
SparkTable
public SparkTable(Table icebergTable, String branch, boolean refreshEagerly)
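Each constructor wraps an existing org.apache.iceberg.Table, optionally pinned to a snapshot id or a branch; refreshEagerly controls whether Iceberg metadata is refreshed eagerly (semantics inferred from the flag name). A sketch using a Hadoop-style table at an illustrative location:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.iceberg.Table;
    import org.apache.iceberg.hadoop.HadoopTables;
    import org.apache.iceberg.spark.source.SparkTable;

    public class SparkTableConstructionSketch {
      public static void main(String[] args) {
        // The table location is an assumption for illustration.
        Table icebergTable = new HadoopTables(new Configuration()).load("/tmp/warehouse/db/events");

        // Wrap the live table state.
        SparkTable liveTable = new SparkTable(icebergTable, true);

        // Pin the wrapper to a specific snapshot id (time travel), if one exists.
        Long snapshotId = icebergTable.currentSnapshot() == null
            ? null
            : icebergTable.currentSnapshot().snapshotId();
        SparkTable pinnedTable = new SparkTable(icebergTable, snapshotId, false);

        // Pin the wrapper to a named branch ("audit" is a hypothetical branch name).
        SparkTable branchTable = new SparkTable(icebergTable, "audit", false);
      }
    }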
-
-
Method Details
-
table
-
name
public String name()
- Specified by:
name in interface org.apache.spark.sql.connector.catalog.Table
-
snapshotId
-
branch
-
copyWithSnapshotId
-
copyWithBranch
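copyWithSnapshotId(long) and copyWithBranch(String) return new SparkTable instances pinned to a snapshot or branch, leaving the original wrapper untouched; the pinned state is then visible through snapshotId() and branch(). A minimal sketch, where the snapshot id and branch name are placeholders:

    import org.apache.iceberg.spark.source.SparkTable;

    class TimeTravelCopiesSketch {
      static void pinCopies(SparkTable sparkTable) {
        // 12345L and "audit" are illustrative values, not real identifiers.
        SparkTable atSnapshot = sparkTable.copyWithSnapshotId(12345L);
        SparkTable onBranch = sparkTable.copyWithBranch("audit");

        System.out.println("snapshot: " + atSnapshot.snapshotId());
        System.out.println("branch:   " + onBranch.branch());
      }
    }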
-
schema
public org.apache.spark.sql.types.StructType schema()
- Specified by:
schema in interface org.apache.spark.sql.connector.catalog.Table
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- Specified by:
partitioning in interface org.apache.spark.sql.connector.catalog.Table
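schema() and partitioning() expose the Iceberg schema and partition spec in Spark's connector types. A small sketch printing both, assuming a SparkTable obtained as in the earlier examples:

    import org.apache.iceberg.spark.source.SparkTable;
    import org.apache.spark.sql.connector.expressions.Transform;
    import org.apache.spark.sql.types.StructType;

    class DescribeTableSketch {
      static void describe(SparkTable sparkTable) {
        // Spark StructType converted from the Iceberg schema.
        StructType schema = sparkTable.schema();
        System.out.println(schema.treeString());

        // Partition transforms (e.g. identity, bucket, days) converted from the Iceberg spec.
        for (Transform transform : sparkTable.partitioning()) {
          System.out.println(transform.describe());
        }
      }
    }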
-
properties
public Map<String,String> properties()
- Specified by:
properties in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
public Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
- Specified by:
capabilities in interface org.apache.spark.sql.connector.catalog.Table
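properties() returns the Iceberg table properties as seen by Spark, and capabilities() advertises which DataSource V2 operations the table supports. An illustrative sketch:

    import java.util.Map;
    import java.util.Set;
    import org.apache.iceberg.spark.source.SparkTable;
    import org.apache.spark.sql.connector.catalog.TableCapability;

    class PropertiesAndCapabilitiesSketch {
      static void inspect(SparkTable sparkTable) {
        // Table properties, e.g. format or commit settings.
        for (Map.Entry<String, String> entry : sparkTable.properties().entrySet()) {
          System.out.println(entry.getKey() + " = " + entry.getValue());
        }

        // Capabilities tell Spark which operations it may plan against this table.
        Set<TableCapability> capabilities = sparkTable.capabilities();
        System.out.println("supports batch read: " + capabilities.contains(TableCapability.BATCH_READ));
      }
    }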
-
metadataColumns
public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
- Specified by:
metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
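metadataColumns() declares Iceberg's metadata columns (for example _file and _pos) so they can be selected alongside data columns. A sketch listing them and a hedged SQL query; the table name is an assumption carried over from the session sketch above:

    import org.apache.iceberg.spark.source.SparkTable;
    import org.apache.spark.sql.SparkSession;
    import org.apache.spark.sql.connector.catalog.MetadataColumn;

    class MetadataColumnsSketch {
      static void listMetadataColumns(SparkTable sparkTable, SparkSession spark) {
        // Print each metadata column name with its Spark data type.
        for (MetadataColumn column : sparkTable.metadataColumns()) {
          System.out.println(column.name() + ": " + column.dataType().simpleString());
        }

        // Metadata columns can be selected directly in SQL (table name is illustrative).
        spark.sql("SELECT _file, _pos, id FROM local.db.events").show();
      }
    }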
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- Specified by:
newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
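newScanBuilder is normally invoked by Spark during query planning, but it can be called directly to build a Scan. A minimal sketch using empty read options:

    import org.apache.iceberg.spark.source.SparkTable;
    import org.apache.spark.sql.connector.read.Scan;
    import org.apache.spark.sql.connector.read.ScanBuilder;
    import org.apache.spark.sql.util.CaseInsensitiveStringMap;

    class ScanBuilderSketch {
      static void planScan(SparkTable sparkTable) {
        // Spark passes read options here; empty options are enough for a default scan.
        ScanBuilder builder = sparkTable.newScanBuilder(CaseInsensitiveStringMap.empty());
        Scan scan = builder.build();

        // The scan's read schema reflects the table schema plus any pruning applied by Spark.
        System.out.println(scan.readSchema().treeString());
        System.out.println(scan.description());
      }
    }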
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- Specified by:
newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
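newWriteBuilder is driven by Spark's write planning; the LogicalWriteInfo argument is supplied by Spark rather than built by hand. A sketch of a write that exercises this path through the DataFrameWriterV2 API, with catalog and table names carried over as assumptions from the session sketch above:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    class WritePathSketch {
      static void appendRows(SparkSession spark) throws Exception {
        Dataset<Row> rows = spark.sql("SELECT 2L AS id, 'b' AS data");

        // writeTo(...).append() resolves the SparkTable and calls newWriteBuilder under the hood.
        rows.writeTo("local.db.events").append();
      }
    }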
-
newRowLevelOperationBuilder
public org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
- Specified by:
newRowLevelOperationBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
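newRowLevelOperationBuilder backs Spark's row-level commands (UPDATE, DELETE, MERGE); Spark constructs the RowLevelOperationInfo itself. A SQL-level sketch of a MERGE that is planned through this hook, assuming the Iceberg SQL extensions are enabled as in the session sketch above and using illustrative names:

    import org.apache.spark.sql.SparkSession;

    class RowLevelOperationSketch {
      static void mergeUpdates(SparkSession spark) {
        // Spark plans this MERGE through SupportsRowLevelOperations on the target SparkTable.
        spark.sql(
            "MERGE INTO local.db.events t " +
            "USING (SELECT 1L AS id, 'updated' AS data) s " +
            "ON t.id = s.id " +
            "WHEN MATCHED THEN UPDATE SET t.data = s.data " +
            "WHEN NOT MATCHED THEN INSERT *");
      }
    }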
-
canDeleteWhere
public boolean canDeleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
- Specified by:
canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
-
deleteWhere
public void deleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
- Specified by:
deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
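canDeleteWhere reports whether a DELETE matching the given predicates can be satisfied by metadata alone; when it returns true, Spark calls deleteWhere, otherwise it falls back to rewriting the affected data through the row-level operation path. A SQL-level sketch, with names carried over as assumptions from the session sketch above:

    import org.apache.spark.sql.SparkSession;

    class DeleteSketch {
      static void deleteRows(SparkSession spark) {
        // A predicate aligned with whole files or partitions can be handled by deleteWhere
        // as a metadata-only delete; otherwise Spark rewrites the affected files.
        spark.sql("DELETE FROM local.db.events WHERE id < 100");
      }
    }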
-
toString
-
equals
-
hashCode
public int hashCode()
-