public class SparkTable
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete, org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
| Constructor and Description |
|---|
| SparkTable(Table icebergTable, boolean refreshEagerly) |
| SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly) |
| SparkTable(Table icebergTable, java.lang.String branch, boolean refreshEagerly) |
| Modifier and Type | Method and Description |
|---|---|
| boolean | canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
| java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> | capabilities() |
| SparkTable | copyWithBranch(java.lang.String targetBranch) |
| SparkTable | copyWithSnapshotId(long newSnapshotId) |
| void | deleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
| boolean | equals(java.lang.Object other) |
| int | hashCode() |
| org.apache.spark.sql.connector.catalog.MetadataColumn[] | metadataColumns() |
| java.lang.String | name() |
| org.apache.spark.sql.connector.write.RowLevelOperationBuilder | newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info) |
| org.apache.spark.sql.connector.read.ScanBuilder | newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
| org.apache.spark.sql.connector.write.WriteBuilder | newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info) |
| org.apache.spark.sql.connector.expressions.Transform[] | partitioning() |
| java.util.Map<java.lang.String,java.lang.String> | properties() |
| org.apache.spark.sql.types.StructType | schema() |
| java.lang.Long | snapshotId() |
| Table | table() |
| java.lang.String | toString() |
Methods inherited from class java.lang.Object: clone, finalize, getClass, notify, notifyAll, wait, wait, wait

public SparkTable(Table icebergTable, boolean refreshEagerly)
public SparkTable(Table icebergTable, java.lang.String branch, boolean refreshEagerly)
public SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly)
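The three constructors differ only in how the wrapper is pinned: to the current table state, to a specific snapshot ID, or to a branch. Below is a minimal sketch of constructing each variant; the package import, the Hadoop path-based table loading, and all names and values are assumptions for illustration, not taken from this page:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.spark.source.SparkTable;

class SparkTableExamples {
  static SparkTable wrapExamples() {
    // Load the underlying Iceberg table; a path-based Hadoop table is used
    // here, but a Table loaded from any Iceberg Catalog works the same way.
    Table icebergTable = new HadoopTables(new Configuration())
        .load("hdfs://namenode/warehouse/db/events");

    // Track the current table state; refreshEagerly = true asks the wrapper
    // to refresh Iceberg metadata eagerly rather than rely on cached state.
    SparkTable latest = new SparkTable(icebergTable, true);

    // Pin the wrapper to a snapshot or a branch (placeholder values).
    SparkTable pinned = new SparkTable(icebergTable, 1234567890L, false);
    SparkTable onBranch = new SparkTable(icebergTable, "audit-branch", false);

    return latest;
  }
}
```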
public Table table()
public java.lang.String name()
Specified by: name in interface org.apache.spark.sql.connector.catalog.Table

public java.lang.Long snapshotId()
public SparkTable copyWithSnapshotId(long newSnapshotId)
public SparkTable copyWithBranch(java.lang.String targetBranch)
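The copy methods produce a new SparkTable re-pinned to the given snapshot or branch, which is convenient for time travel over an already-resolved table. A short sketch, reusing the assumed `latest` wrapper from the constructor example; the snapshot ID and branch name are placeholders:

```java
// Re-target the wrapper without reloading the Iceberg table.
SparkTable asOfSnapshot = latest.copyWithSnapshotId(1234567890L);
SparkTable asOfBranch = latest.copyWithBranch("audit-branch");

Long pinnedId = asOfSnapshot.snapshotId(); // snapshot the copy reads from
Table unwrapped = asOfSnapshot.table();    // the wrapped Iceberg table
```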
public org.apache.spark.sql.types.StructType schema()
Specified by: schema in interface org.apache.spark.sql.connector.catalog.Table

public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Specified by: partitioning in interface org.apache.spark.sql.connector.catalog.Table

public java.util.Map<java.lang.String,java.lang.String> properties()
Specified by: properties in interface org.apache.spark.sql.connector.catalog.Table

public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
Specified by: capabilities in interface org.apache.spark.sql.connector.catalog.Table

public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
Specified by: metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
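metadataColumns() is what lets Spark resolve Iceberg's metadata columns (for example `_file` and `_pos`) in queries. A hedged illustration of querying one through SQL; the SparkSession `spark` and the table name are assumptions:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

// Group rows by the data file they live in, using the _file metadata column.
Dataset<Row> rowsPerFile = spark.sql(
    "SELECT _file, count(*) AS row_count FROM db.events GROUP BY _file");
rowsPerFile.show();
```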
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Specified by: newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
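Spark invokes newScanBuilder during query planning, passing read options through `CaseInsensitiveStringMap`; calling it directly is mainly useful in tests. A sketch under that assumption, with `sparkTable` an existing SparkTable and a placeholder snapshot ID (`snapshot-id` is one of Iceberg's documented Spark read options):

```java
import java.util.Map;
import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

CaseInsensitiveStringMap options =
    new CaseInsensitiveStringMap(Map.of("snapshot-id", "1234567890"));

ScanBuilder builder = sparkTable.newScanBuilder(options); // plan a read
Scan scan = builder.build();
StructType readSchema = scan.readSchema(); // schema the scan will produce
```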
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
Specified by: newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite

public org.apache.spark.sql.connector.write.RowLevelOperationBuilder newRowLevelOperationBuilder(org.apache.spark.sql.connector.write.RowLevelOperationInfo info)
Specified by: newRowLevelOperationBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations
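Application code does not normally construct `LogicalWriteInfo` or `RowLevelOperationInfo`; Spark builds them while planning DataFrame writes and SQL DML. A sketch of operations that are expected to reach these builders, with an assumed SparkSession `spark` and placeholder table names (the checked NoSuchTableException from append() is omitted for brevity):

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

Dataset<Row> staged = spark.table("db.events_staging");

// An append is planned through newWriteBuilder(...).
staged.writeTo("db.events").append();

// Row-level DML (DELETE/UPDATE/MERGE) is planned through
// newRowLevelOperationBuilder(...) when rows must be rewritten.
spark.sql("MERGE INTO db.events t USING db.events_staging s "
    + "ON t.id = s.id WHEN MATCHED THEN UPDATE SET *");
```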
public boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete

public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
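For DELETE predicates that align with whole files or partitions, Spark can use this pair to perform a metadata-only delete instead of rewriting rows: `canDeleteWhere` is consulted first, and `deleteWhere` runs only if it returns true. A sketch with an assumed `sparkTable` and a placeholder partition column:

```java
import org.apache.spark.sql.sources.EqualTo;
import org.apache.spark.sql.sources.Filter;

Filter[] filters = new Filter[] { new EqualTo("day", "2023-01-01") };

if (sparkTable.canDeleteWhere(filters)) {
  // Safe to drop matching files/partitions without a row-level rewrite.
  sparkTable.deleteWhere(filters);
}
```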
public java.lang.String toString()
Overrides: toString in class java.lang.Object

public boolean equals(java.lang.Object other)
Overrides: equals in class java.lang.Object

public int hashCode()
Overrides: hashCode in class java.lang.Object