Package org.apache.iceberg.spark.source
Class SparkTable
java.lang.Object
org.apache.iceberg.spark.source.SparkTable
- All Implemented Interfaces:
org.apache.spark.sql.connector.catalog.SupportsDeleteV2, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.TruncatableTable
- Direct Known Subclasses:
StagedSparkTable
public class SparkTable
extends Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDeleteV2, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
-
Constructor Summary
Constructors
SparkTable(Table icebergTable, boolean refreshEagerly)
SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly)
SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly, boolean isTableRewrite)
SparkTable(Table icebergTable, String branch, boolean refreshEagerly)
-
Method Summary
Modifier and Type / Method
String branch()
boolean canDeleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
SparkTable copyWithBranch(String targetBranch)
SparkTable copyWithSnapshotId(long newSnapshotId)
void deleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
boolean equals(Object other)
int hashCode()
org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
String name()
org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Map<String, String> properties()
org.apache.spark.sql.types.StructType schema()
Long snapshotId()
org.apache.iceberg.Table table()
String toString()

Methods inherited from class java.lang.Object
clone, finalize, getClass, notify, notifyAll, wait, wait, wait

Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
truncateTable

Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
canRenameConflictingMetadataColumns

Methods inherited from interface org.apache.spark.sql.connector.catalog.Table
columns
-
Constructor Details
-
SparkTable
public SparkTable(Table icebergTable, boolean refreshEagerly)
-
SparkTable
public SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly)
-
SparkTable
public SparkTable(Table icebergTable, Long snapshotId, boolean refreshEagerly, boolean isTableRewrite)
-
SparkTable
public SparkTable(Table icebergTable, String branch, boolean refreshEagerly)
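A minimal sketch of creating SparkTable instances directly, assuming a HadoopCatalog warehouse at file:///tmp/warehouse and a db.events table (both placeholders); in a Spark job these wrappers are normally produced by Iceberg's catalog integration rather than constructed by hand.

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hadoop.HadoopCatalog;
import org.apache.iceberg.spark.source.SparkTable;

public class SparkTableConstruction {
  public static void main(String[] args) {
    // Load the underlying Iceberg table from a (hypothetical) Hadoop warehouse.
    HadoopCatalog catalog = new HadoopCatalog(new Configuration(), "file:///tmp/warehouse");
    org.apache.iceberg.Table icebergTable = catalog.loadTable(TableIdentifier.of("db", "events"));

    // Wrap it for Spark's DataSource V2 API; the boolean argument is refreshEagerly.
    SparkTable current = new SparkTable(icebergTable, true);

    // Pin the wrapper to a specific snapshot or branch instead of the live table state.
    Long snapshotId = icebergTable.currentSnapshot().snapshotId();
    SparkTable atSnapshot = new SparkTable(icebergTable, snapshotId, false);
    SparkTable onBranch = new SparkTable(icebergTable, "main", false);
  }
}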
-
-
Method Details
-
table
public org.apache.iceberg.Table table()
-
name
public String name()
- Specified by:
name in interface org.apache.spark.sql.connector.catalog.Table
-
snapshotId
public Long snapshotId()
-
branch
public String branch()
-
copyWithSnapshotId
public SparkTable copyWithSnapshotId(long newSnapshotId)
-
copyWithBranch
public SparkTable copyWithBranch(String targetBranch)
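A minimal sketch of deriving pinned copies from an existing SparkTable; the snapshot ID and branch name below are caller-supplied placeholders.

// Both methods return a new SparkTable over the same Iceberg table, fixed to the
// given snapshot or branch.
static SparkTable pinToSnapshot(SparkTable table, long snapshotId) {
  return table.copyWithSnapshotId(snapshotId);
}

static SparkTable pinToBranch(SparkTable table, String branchName) {
  return table.copyWithBranch(branchName);
}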
-
schema
public org.apache.spark.sql.types.StructType schema()
- Specified by:
schema in interface org.apache.spark.sql.connector.catalog.Table
-
partitioning
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
- Specified by:
partitioning in interface org.apache.spark.sql.connector.catalog.Table
-
properties
public Map<String, String> properties()
- Specified by:
properties in interface org.apache.spark.sql.connector.catalog.Table
-
capabilities
public Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
- Specified by:
capabilities in interface org.apache.spark.sql.connector.catalog.Table
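A minimal sketch of inspecting the Spark-facing metadata these Table methods expose, assuming an already-constructed SparkTable.

import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import org.apache.spark.sql.connector.catalog.TableCapability;
import org.apache.spark.sql.connector.expressions.Transform;
import org.apache.spark.sql.types.StructType;

static void describe(SparkTable table) {
  StructType schema = table.schema();                  // Spark schema derived from the Iceberg schema
  Transform[] partitioning = table.partitioning();     // partition transforms (identity, bucket, days, ...)
  Map<String, String> properties = table.properties(); // table properties surfaced to Spark
  Set<TableCapability> capabilities = table.capabilities();

  System.out.println(table.name() + ": " + schema.simpleString());
  System.out.println("partitioning: " + Arrays.toString(partitioning));
  System.out.println("capabilities: " + capabilities);
  System.out.println("properties: " + properties);
}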
-
metadataColumns
public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
- Specified by:
metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
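A minimal sketch listing the metadata columns the table exposes to Spark; which columns appear (for example Iceberg's _file or _pos) should be treated as an assumption rather than part of this page.

import org.apache.spark.sql.connector.catalog.MetadataColumn;

static void printMetadataColumns(SparkTable table) {
  for (MetadataColumn column : table.metadataColumns()) {
    System.out.println(column.name() + " (" + column.dataType() + ")");
  }
}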
-
newScanBuilder
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- Specified by:
newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
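newScanBuilder is normally invoked by Spark's DataSource V2 read planning; the sketch below only shows the entry point with an empty option map, since actually producing rows requires the rest of the connector machinery.

import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

static void planScan(SparkTable table) {
  ScanBuilder builder = table.newScanBuilder(CaseInsensitiveStringMap.empty());
  Scan scan = builder.build();
  System.out.println(scan.description());
  System.out.println(scan.readSchema().treeString());
}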
-
newWriteBuilder
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
- Specified by:
newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
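newWriteBuilder is likewise driven by Spark's write planning (Spark constructs the LogicalWriteInfo). A minimal sketch of the user-facing path that ends up here, assuming an Iceberg catalog registered as my_catalog and hypothetical source and target tables.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;

static void appendEvents(SparkSession spark) throws NoSuchTableException {
  // Spark resolves my_catalog.db.events to a SparkTable and calls
  // newWriteBuilder(LogicalWriteInfo) while planning the append.
  Dataset<Row> source = spark.table("my_catalog.db.staged_events");
  source.writeTo("my_catalog.db.events").append();
}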
-
newRowLevelOperationBuilder
public org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
- Specified by:
newRowLevelOperationBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
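Row-level operations are also planned by Spark rather than called directly; a hedged sketch of a SQL statement that exercises this builder (catalog and table names are illustrative).

import org.apache.spark.sql.SparkSession;

static void mergeUpdates(SparkSession spark) {
  // UPDATE, MERGE, and DELETEs that cannot be handled by deleteWhere are planned
  // through newRowLevelOperationBuilder(RowLevelOperationInfo).
  spark.sql(
      "MERGE INTO my_catalog.db.events t USING my_catalog.db.updates s "
          + "ON t.id = s.id "
          + "WHEN MATCHED THEN UPDATE SET * "
          + "WHEN NOT MATCHED THEN INSERT *");
}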
-
canDeleteWhere
public boolean canDeleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
- Specified by:
canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
-
deleteWhere
public void deleteWhere(org.apache.spark.sql.connector.expressions.filter.Predicate[] predicates)
- Specified by:
deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDeleteV2
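A hedged sketch of the metadata-only delete path exposed by SupportsDeleteV2; the column name and value are placeholders, and whether canDeleteWhere accepts a filter depends on how it lines up with the table's partitioning. Expressions.column and Expressions.literal are Spark helper methods assumed here for building the V2 predicate.

import org.apache.spark.sql.connector.expressions.Expression;
import org.apache.spark.sql.connector.expressions.Expressions;
import org.apache.spark.sql.connector.expressions.filter.Predicate;

static void dropDay(SparkTable table) {
  // Equality predicate on a (presumed) partition column; if canDeleteWhere accepts it,
  // deleteWhere can drop matching data without rewriting files row by row.
  Predicate filter =
      new Predicate(
          "=",
          new Expression[] {
            Expressions.column("event_date"), Expressions.literal("2024-01-01")
          });
  Predicate[] filters = new Predicate[] {filter};

  if (table.canDeleteWhere(filters)) {
    table.deleteWhere(filters);
  }
}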
-
toString
public String toString()
-
equals
public boolean equals(Object other)
-
hashCode
public int hashCode()
-