public class SparkTable extends java.lang.Object implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete, SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
Constructor and Description |
---|
SparkTable(Table icebergTable, boolean refreshEagerly) |
SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly) |
Modifier and Type | Method and Description |
---|---|
boolean | canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> | capabilities() |
void | deleteWhere(org.apache.spark.sql.sources.Filter[] filters) |
boolean | equals(java.lang.Object other) |
int | hashCode() |
org.apache.spark.sql.connector.catalog.MetadataColumn[] | metadataColumns() |
java.lang.String | name() |
RowLevelOperationBuilder | newRowLevelOperationBuilder(RowLevelOperationInfo info) Returns a RowLevelOperationBuilder to build a RowLevelOperation. |
org.apache.spark.sql.connector.read.ScanBuilder | newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
org.apache.spark.sql.connector.write.WriteBuilder | newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info) |
org.apache.spark.sql.connector.expressions.Transform[] | partitioning() |
java.util.Map<java.lang.String,java.lang.String> | properties() |
org.apache.spark.sql.types.StructType | schema() |
Table | table() |
java.lang.String | toString() |
public SparkTable(Table icebergTable, boolean refreshEagerly)
public SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly)
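A minimal construction sketch; the warehouse location is a placeholder, and loading via HadoopTables is just one of several ways to obtain the Iceberg Table being wrapped:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.spark.source.SparkTable;

public class SparkTableConstruction {
  public static void main(String[] args) {
    // Load an Iceberg table from a path-based location (a catalog lookup
    // would work equally well); the location is a placeholder.
    Table icebergTable = new HadoopTables(new Configuration())
        .load("hdfs://warehouse/db/events");
    // Wrap it for Spark's DataSource V2 API.
    SparkTable table = new SparkTable(icebergTable, true /* refreshEagerly */);
    // table() hands back the wrapped Iceberg table.
    System.out.println(table.name() + " -> " + table.table().location());
  }
}
```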
public Table table()
public java.lang.String name()
Specified by: name in interface org.apache.spark.sql.connector.catalog.Table
public org.apache.spark.sql.types.StructType schema()
Specified by: schema in interface org.apache.spark.sql.connector.catalog.Table
public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
Specified by: partitioning in interface org.apache.spark.sql.connector.catalog.Table
public java.util.Map<java.lang.String,java.lang.String> properties()
Specified by: properties in interface org.apache.spark.sql.connector.catalog.Table
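The inspection methods above translate Iceberg metadata into Spark's connector types; a small sketch of reading them off a SparkTable:

```java
import java.util.Map;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.expressions.Transform;
import org.apache.spark.sql.types.StructType;

public class InspectSparkTable {
  static void describe(SparkTable table) {
    StructType schema = table.schema();              // Iceberg schema as Spark types
    Transform[] partitioning = table.partitioning(); // partition spec as Spark transforms
    Map<String, String> props = table.properties();  // Iceberg table properties
    System.out.printf("%s: %d columns, %d partition fields, %d properties%n",
        table.name(), schema.fields().length, partitioning.length, props.size());
  }
}
```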
public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
Specified by: capabilities in interface org.apache.spark.sql.connector.catalog.Table
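Spark consults capabilities() to decide which DataSource V2 code paths are legal for the table; a sketch of a direct check:

```java
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.TableCapability;

public class CapabilityCheck {
  // True if Spark may plan a batch scan against this table.
  static boolean supportsBatchRead(SparkTable table) {
    return table.capabilities().contains(TableCapability.BATCH_READ);
  }
}
```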
public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
Specified by: metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
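A sketch that lists the exposed metadata columns; Iceberg's Spark integration surfaces columns such as _file through this API, though the exact set depends on the version:

```java
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.MetadataColumn;

public class ListMetadataColumns {
  static void print(SparkTable table) {
    for (MetadataColumn column : table.metadataColumns()) {
      System.out.println(column.name() + ": " + column.dataType());
    }
  }
}
```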
public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Specified by: newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
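Spark normally drives newScanBuilder during query planning; calling it directly shows the flow. The snapshot-id read option and its value here are illustrative:

```java
import java.util.Map;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.read.Scan;
import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

public class BuildScan {
  static Scan planScan(SparkTable table) {
    // Read options are passed through as a case-insensitive string map.
    CaseInsensitiveStringMap options = new CaseInsensitiveStringMap(
        Map.of("snapshot-id", "5963754116929452032")); // illustrative value
    ScanBuilder builder = table.newScanBuilder(options);
    return builder.build();
  }
}
```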
public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
Specified by: newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
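newWriteBuilder takes a LogicalWriteInfo that Spark constructs during write planning, so user code rarely calls it directly; an append through the DataFrame API exercises it indirectly. The table name is a placeholder:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class AppendExample {
  public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().getOrCreate();
    Dataset<Row> df = spark.range(10).toDF("id");
    // Spark builds the LogicalWriteInfo and invokes newWriteBuilder internally.
    df.writeTo("db.events").append();
  }
}
```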
public RowLevelOperationBuilder newRowLevelOperationBuilder(RowLevelOperationInfo info)
Description copied from interface: SupportsRowLevelOperations
Returns a RowLevelOperationBuilder to build a RowLevelOperation.
Specified by: newRowLevelOperationBuilder in interface SupportsRowLevelOperations
Parameters: info - the row-level operation info such as command (e.g. DELETE) and options
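RowLevelOperationInfo is likewise constructed by Spark when it plans DELETE, UPDATE, or MERGE statements; SQL like the following is what ultimately reaches newRowLevelOperationBuilder, assuming an Iceberg catalog is configured in the session (table and column names are placeholders):

```java
import org.apache.spark.sql.SparkSession;

public class RowLevelUpdate {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder().getOrCreate();
    // Spark plans this as a row-level operation against the Iceberg table.
    spark.sql("UPDATE db.events SET status = 'closed' WHERE id = 7");
  }
}
```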
public boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
Specified by: deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
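A sketch pairing the two SupportsDelete methods, assuming Iceberg's usual semantics: canDeleteWhere reports whether the filters can be satisfied from table metadata alone, and deleteWhere then drops the matching files. The column name is a placeholder:

```java
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.sources.EqualTo;
import org.apache.spark.sql.sources.Filter;

public class MetadataDelete {
  static void deleteDay(SparkTable table) {
    Filter[] filters = { new EqualTo("event_date", "2021-01-01") };
    if (table.canDeleteWhere(filters)) {
      // Safe to delete using metadata only; no data files are rewritten.
      table.deleteWhere(filters);
    }
  }
}
```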
public java.lang.String toString()
Overrides: toString in class java.lang.Object
public boolean equals(java.lang.Object other)
Overrides: equals in class java.lang.Object
public int hashCode()
Overrides: hashCode in class java.lang.Object