Package org.apache.iceberg.spark
Class SparkCachedTableCatalog
java.lang.Object
  org.apache.iceberg.spark.SparkCachedTableCatalog

All Implemented Interfaces:
  org.apache.spark.sql.connector.catalog.CatalogPlugin, org.apache.spark.sql.connector.catalog.TableCatalog

public class SparkCachedTableCatalog extends java.lang.Object implements org.apache.spark.sql.connector.catalog.TableCatalog
An internal table catalog that is capable of loading tables from a cache.
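Because SparkCachedTableCatalog only implements Spark's standard CatalogPlugin and TableCatalog contracts, it can be driven like any other catalog plugin. The following is a minimal sketch of that contract, not documented usage of this internal class: the catalog name "cache", the empty options map, and the cache-key identifier are assumptions, since this page does not describe how cached tables are keyed.

  import java.util.Collections;
  import org.apache.spark.sql.connector.catalog.Identifier;
  import org.apache.spark.sql.connector.catalog.Table;
  import org.apache.spark.sql.connector.catalog.TableCatalog;
  import org.apache.spark.sql.util.CaseInsensitiveStringMap;

  public class CachedCatalogSketch {
    public static void main(String[] args) throws Exception {
      // Instantiate and initialize the catalog the way Spark's plugin loader would.
      // The name "cache" and the empty options map are illustrative assumptions.
      TableCatalog catalog = new org.apache.iceberg.spark.SparkCachedTableCatalog();
      catalog.initialize("cache", new CaseInsensitiveStringMap(Collections.emptyMap()));

      // Hypothetical identifier; the actual cache-key format is not specified on this page.
      Identifier ident = Identifier.of(new String[0], "some-cached-table-key");
      Table table = catalog.loadTable(ident); // throws NoSuchTableException on a cache miss
      System.out.println(table.name());
    }
  }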
Constructor Summary

  SparkCachedTableCatalog()

Method Summary

  SparkTable  alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes)
  SparkTable  createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties)
  boolean  dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
  void  initialize(java.lang.String catalogName, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
  void  invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
  org.apache.spark.sql.connector.catalog.Identifier[]  listTables(java.lang.String[] namespace)
  SparkTable  loadTable(org.apache.spark.sql.connector.catalog.Identifier ident)
  SparkTable  loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestampMicros)
  SparkTable  loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.lang.String version)
  java.lang.String  name()
  boolean  purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident)
  void  renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent, org.apache.spark.sql.connector.catalog.Identifier newIdent)

Methods inherited from class java.lang.Object:
  clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Method Detail
-
listTables
public org.apache.spark.sql.connector.catalog.Identifier[] listTables(java.lang.String[] namespace)
- Specified by:
listTables in interface org.apache.spark.sql.connector.catalog.TableCatalog
-
loadTable
public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
- Specified by:
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
- Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
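Because this overload declares the checked NoSuchTableException, a caller has to handle a cache miss explicitly. A minimal sketch of such handling (the helper name and null-on-miss behavior are illustrative choices, not part of this API):

  import org.apache.spark.sql.catalyst.analysis.NoSuchTableException;
  import org.apache.spark.sql.connector.catalog.Identifier;
  import org.apache.spark.sql.connector.catalog.Table;
  import org.apache.spark.sql.connector.catalog.TableCatalog;

  final class CacheLookups {
    // Illustrative helper; returns null when the identifier is not present in the cache.
    static Table loadIfCached(TableCatalog catalog, Identifier ident) {
      try {
        return catalog.loadTable(ident);
      } catch (NoSuchTableException e) {
        return null;
      }
    }
  }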
-
loadTable
public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.lang.String version) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
- Specified by:
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
- Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
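This overload is the version-based time-travel hook of Spark's TableCatalog API (the path SQL VERSION AS OF queries generally resolve through). The expected format of the version string for a cached Iceberg table is not documented on this page, so the value below is only a placeholder; the snippet assumes the `catalog` and `ident` variables from the earlier sketch's main method.

  // Sketch: resolve the cached table at a specific version.
  // "123456789" is a placeholder; the real version string format is an assumption.
  Table atVersion = catalog.loadTable(ident, "123456789");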
-
loadTable
public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestampMicros) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
- Specified by:
loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
- Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
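The companion overload takes a point in time; the parameter name indicates microseconds since the epoch, so millisecond values need converting. A short sketch, again assuming `catalog` and `ident` from the earlier example's main method:

  // Sketch: load the cached table as of "now", converting milliseconds to microseconds.
  long timestampMicros = java.util.concurrent.TimeUnit.MILLISECONDS.toMicros(System.currentTimeMillis());
  Table asOf = catalog.loadTable(ident, timestampMicros);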
-
invalidateTable
public void invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
- Specified by:
invalidateTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
-
createTable
public SparkTable createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
- Specified by:
createTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
- Throws:
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
-
alterTable
public SparkTable alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes)
- Specified by:
alterTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
-
dropTable
public boolean dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
- Specified by:
dropTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
-
purgeTable
public boolean purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident) throws java.lang.UnsupportedOperationException
- Specified by:
purgeTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
- Throws:
java.lang.UnsupportedOperationException
-
renameTable
public void renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent, org.apache.spark.sql.connector.catalog.Identifier newIdent)
- Specified by:
renameTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
-
initialize
public void initialize(java.lang.String catalogName, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
- Specified by:
initialize in interface org.apache.spark.sql.connector.catalog.CatalogPlugin
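initialize comes from the CatalogPlugin contract: Spark calls it once after constructing a configured catalog, passing the catalog name and the options taken from the spark.sql.catalog.<name>.* settings. The sketch below shows that generic registration mechanism only; whether this internal catalog is meant to be registered this way, or is wired up by Iceberg itself, is not stated on this page, and the catalog name and option key/value are illustrative assumptions.

  import org.apache.spark.sql.SparkSession;

  public class RegisterCatalogSketch {
    public static void main(String[] args) {
      SparkSession spark = SparkSession.builder()
          .master("local[*]")
          .config("spark.sql.catalog.my_catalog", "org.apache.iceberg.spark.SparkCachedTableCatalog")
          .config("spark.sql.catalog.my_catalog.some-option", "some-value")
          .getOrCreate();
      // On first use of my_catalog, Spark instantiates the configured class and calls
      // initialize("my_catalog", options) with the prefixed settings ("some-option" here).
      spark.stop();
    }
  }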
-
name
public java.lang.String name()
- Specified by:
name in interface org.apache.spark.sql.connector.catalog.CatalogPlugin