Class SparkTable

  • All Implemented Interfaces:
    org.apache.spark.sql.connector.catalog.SupportsDelete, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.TruncatableTable, SupportsRowLevelOperations
    Direct Known Subclasses:
    StagedSparkTable

    public class SparkTable
    extends java.lang.Object
    implements org.apache.spark.sql.connector.catalog.Table, org.apache.spark.sql.connector.catalog.SupportsRead, org.apache.spark.sql.connector.catalog.SupportsWrite, org.apache.spark.sql.connector.catalog.SupportsDelete, SupportsRowLevelOperations, org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
    • Constructor Summary

      Constructors 
      Constructor Description
      SparkTable(Table icebergTable, boolean refreshEagerly)  
      SparkTable(Table icebergTable, java.lang.Long snapshotId, boolean refreshEagerly)  
    • Method Summary

      All Methods Instance Methods Concrete Methods 
      Modifier and Type Method Description
      boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)  
      java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()  
      void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)  
      boolean equals(java.lang.Object other)  
      int hashCode()  
      org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()  
      java.lang.String name()  
      RowLevelOperationBuilder newRowLevelOperationBuilder(RowLevelOperationInfo info)
      Returns a RowLevelOperationBuilder to build a RowLevelOperation.
      org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)  
      org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)  
      org.apache.spark.sql.connector.expressions.Transform[] partitioning()  
      java.util.Map<java.lang.String,java.lang.String> properties()  
      org.apache.spark.sql.types.StructType schema()  
      Table table()  
      java.lang.String toString()  
      • Methods inherited from class java.lang.Object

        clone, finalize, getClass, notify, notifyAll, wait, wait, wait
      • Methods inherited from interface org.apache.spark.sql.connector.catalog.SupportsDelete

        truncateTable
    • Constructor Detail

      • SparkTable

        public SparkTable(Table icebergTable,
                          boolean refreshEagerly)
      • SparkTable

        public SparkTable(Table icebergTable,
                          java.lang.Long snapshotId,
                          boolean refreshEagerly)
    • Method Detail

      • table

        public Table table()
      • name

        public java.lang.String name()
        Specified by:
        name in interface org.apache.spark.sql.connector.catalog.Table
      • schema

        public org.apache.spark.sql.types.StructType schema()
        Specified by:
        schema in interface org.apache.spark.sql.connector.catalog.Table
      • partitioning

        public org.apache.spark.sql.connector.expressions.Transform[] partitioning()
        Specified by:
        partitioning in interface org.apache.spark.sql.connector.catalog.Table
      • properties

        public java.util.Map<java.lang.String,java.lang.String> properties()
        Specified by:
        properties in interface org.apache.spark.sql.connector.catalog.Table
      • capabilities

        public java.util.Set<org.apache.spark.sql.connector.catalog.TableCapability> capabilities()
        Specified by:
        capabilities in interface org.apache.spark.sql.connector.catalog.Table
      • metadataColumns

        public org.apache.spark.sql.connector.catalog.MetadataColumn[] metadataColumns()
        Specified by:
        metadataColumns in interface org.apache.spark.sql.connector.catalog.SupportsMetadataColumns
      • newScanBuilder

        public org.apache.spark.sql.connector.read.ScanBuilder newScanBuilder(org.apache.spark.sql.util.CaseInsensitiveStringMap options)
        Specified by:
        newScanBuilder in interface org.apache.spark.sql.connector.catalog.SupportsRead
      • newWriteBuilder

        public org.apache.spark.sql.connector.write.WriteBuilder newWriteBuilder(org.apache.spark.sql.connector.write.LogicalWriteInfo info)
        Specified by:
        newWriteBuilder in interface org.apache.spark.sql.connector.catalog.SupportsWrite
      • canDeleteWhere

        public boolean canDeleteWhere(org.apache.spark.sql.sources.Filter[] filters)
        Specified by:
        canDeleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
      • deleteWhere

        public void deleteWhere(org.apache.spark.sql.sources.Filter[] filters)
        Specified by:
        deleteWhere in interface org.apache.spark.sql.connector.catalog.SupportsDelete
      • toString

        public java.lang.String toString()
        Overrides:
        toString in class java.lang.Object
      • equals

        public boolean equals(java.lang.Object other)
        Overrides:
        equals in class java.lang.Object
      • hashCode

        public int hashCode()
        Overrides:
        hashCode in class java.lang.Object