public class SparkTable
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete, SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
| Constructor and Description |
|---|
| SparkTable(Table icebergTable, boolean refreshEagerly) |
| SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly) |
| Modifier and Type | Method and Description |
|---|---|
| boolean | canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
| java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> | capabilities() |
| void | deleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
| boolean | equals(java.lang.Object other) |
| int | hashCode() |
| org.apache.spark.sql.connector.catalog.MetadataColumn[] | metadataColumns() |
| java.lang.String | name() |
| RowLevelOperationBuilder | newRowLevelOperationBuilder(RowLevelOperationInfo info) Returns a RowLevelOperationBuilder to build a RowLevelOperation. |
| org.apache.spark.sql.connector.read.ScanBuilder | newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
| org.apache.spark.sql.connector.write.WriteBuilder | newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info) |
| org.apache.spark.sql.connector.expressions.Transform[] | partitioning() |
| java.util.Map<java.lang.String,java.lang.String> | properties() |
| org.apache.spark.sql.types.StructType | schema() |
| Table | table() |
| java.lang.String | toString() |
public SparkTable(Table icebergTable, boolean refreshEagerly)
public SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly)
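Both constructors take the underlying org.apache.iceberg.Table. A minimal construction sketch follows; the HadoopTables loader, the warehouse path, and the reading of refreshEagerly as "refresh table metadata eagerly when a scan is created" are illustrative assumptions, not guarantees from this page:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.spark.source.SparkTable;

public class SparkTableExample {
  public static void main(String[] args) {
    // Load the underlying Iceberg table; HadoopTables is one of several loaders.
    Table icebergTable = new HadoopTables(new Configuration())
        .load("hdfs://nn:8020/warehouse/db/events"); // hypothetical location

    // Track the table's live state; refreshEagerly is assumed to trigger an
    // eager metadata refresh whenever Spark requests a new scan.
    SparkTable liveTable = new SparkTable(icebergTable, true);

    // The snapshotId overload pins reads to one snapshot (time travel).
    long snapshotId = icebergTable.currentSnapshot().snapshotId();
    SparkTable pinnedTable = new SparkTable(icebergTable, snapshotId, false);

    System.out.println(pinnedTable.name() + " @ snapshot " + snapshotId);
  }
}
```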
public Table table()
public java.lang.String name()
Specified by: name in interface org.apache.spark.sql.connector.catalog.Table

public org.apache.spark.sql.types.StructType schema()
Specified by: schema in interface org.apache.spark.sql.connector.catalog.Table

public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Specified by: partitioning in interface org.apache.spark.sql.connector.catalog.Table

public java.util.Map<java.lang.String,java.lang.String> properties()
Specified by: properties in interface org.apache.spark.sql.connector.catalog.Table

public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
Specified by: capabilities in interface org.apache.spark.sql.connector.catalog.Table

public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
Specified by: metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
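Because SparkTable implements SupportsMetadataColumns, whatever columns metadataColumns() declares can be selected like ordinary columns. A sketch, assuming an active SparkSession named spark, an Iceberg catalog named demo, and Iceberg's usual _file and _pos metadata column names:

```java
// _file (source data file) and _pos (row position) are assumed Iceberg
// metadata column names; they are queried like regular columns.
spark.sql("SELECT _file, _pos, id FROM demo.db.events").show();
```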
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Specified by: newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
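Spark invokes newScanBuilder itself while planning a read; calling it directly, as below, only illustrates the shape of the API. The sparkTable instance is assumed (e.g. from the constructor sketch above):

```java
import java.util.Collections;
import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

// Empty read options; Spark would normally pass per-read options here.
ScanBuilder builder =
    sparkTable.newScanBuilder(new CaseInsensitiveStringMap(Collections.emptyMap()));
Scan scan = builder.build();
System.out.println(scan.readSchema());
```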
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
Specified by: newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
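LogicalWriteInfo is assembled by Spark during write planning, so newWriteBuilder is rarely called by hand; the user-facing path is the DataFrame v2 write API. A sketch, assuming a DataFrame df and the demo catalog from earlier (note that append() declares the checked NoSuchTableException):

```java
// Spark builds the LogicalWriteInfo and routes it to newWriteBuilder
// internally when the v2 write API is used.
df.writeTo("demo.db.events").append();
```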
public RowLevelOperationBuilder newRowLevelOperationBuilder(RowLevelOperationInfo info)
Description copied from interface: SupportsRowLevelOperations
Returns a RowLevelOperationBuilder to build a RowLevelOperation.
Specified by: newRowLevelOperationBuilder in interface SupportsRowLevelOperations
Parameters: info - the row-level operation info such as command (e.g. DELETE) and options
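Row-level operations are likewise driven by Spark's planner: DELETE, UPDATE, and MERGE statements that have to rewrite rows are planned through this builder. A sketch, assuming the demo catalog and an updates view:

```java
// Planned through newRowLevelOperationBuilder when matched rows must change.
spark.sql(
    "MERGE INTO demo.db.events t USING updates s ON t.id = s.id "
        + "WHEN MATCHED THEN UPDATE SET *");
```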
public boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete

public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
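canDeleteWhere reports whether the pushed filters can be satisfied without rewriting data files; if so, Spark calls deleteWhere for a metadata-only delete, otherwise it falls back to the row-level path above. A sketch, assuming the table is partitioned by day so the predicate can align with whole files:

```java
// A partition-aligned predicate lets canDeleteWhere return true, so
// deleteWhere can drop whole files via metadata instead of rewriting rows.
spark.sql("DELETE FROM demo.db.events WHERE day = DATE '2023-01-01'");
```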
public java.lang.String toString()
Overrides: toString in class java.lang.Object

public boolean equals(java.lang.Object other)
Overrides: equals in class java.lang.Object

public int hashCode()
Overrides: hashCode in class java.lang.Object