public class SparkSessionCatalog<T extends org.apache.spark.sql.connector.catalog.TableCatalog & org.apache.spark.sql.connector.catalog.FunctionCatalog & org.apache.spark.sql.connector.catalog.SupportsNamespaces>
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.CatalogExtension

Type Parameters:
T - CatalogPlugin class to avoid casting to TableCatalog, FunctionCatalog and SupportsNamespaces
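Wiring this catalog into a Spark session follows the usual CatalogPlugin configuration. A minimal sketch in Java (the catalog name spark_catalog is Spark's built-in session catalog; the `type` option value below is only an illustrative assumption about the deployment):

```java
import org.apache.spark.sql.SparkSession;

public class SessionCatalogConfigExample {
  public static void main(String[] args) {
    // Register SparkSessionCatalog as the implementation behind Spark's built-in
    // "spark_catalog" so that Iceberg tables and non-Iceberg session-catalog tables
    // resolve through the same catalog name.
    SparkSession spark = SparkSession.builder()
        .appName("spark-session-catalog-example")
        .config("spark.sql.catalog.spark_catalog",
                "org.apache.iceberg.spark.SparkSessionCatalog")
        // Remaining "spark.sql.catalog.spark_catalog.*" keys are handed to
        // initialize(name, options) and then to the SparkCatalog created by
        // buildSparkCatalog(); "type" = "hive" is an assumed example value.
        .config("spark.sql.catalog.spark_catalog.type", "hive")
        .getOrCreate();

    spark.sql("SHOW NAMESPACES").show();
    spark.stop();
  }
}
```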
| Constructor and Description |
|---|
| SparkSessionCatalog() |
| Modifier and Type | Method and Description |
|---|---|
| void | alterNamespace(java.lang.String[] namespace, org.apache.spark.sql.connector.catalog.NamespaceChange... changes) |
| org.apache.spark.sql.connector.catalog.Table | alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes) |
| protected org.apache.spark.sql.connector.catalog.TableCatalog | buildSparkCatalog(java.lang.String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options) Build a SparkCatalog to be used for Iceberg operations. |
| void | createNamespace(java.lang.String[] namespace, java.util.Map<java.lang.String,java.lang.String> metadata) |
| org.apache.spark.sql.connector.catalog.Table | createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) |
| java.lang.String[] | defaultNamespace() |
| boolean | dropNamespace(java.lang.String[] namespace, boolean cascade) |
| boolean | dropTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
| Catalog | icebergCatalog() Returns the underlying Catalog backing this Spark Catalog |
| void | initialize(java.lang.String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
| void | invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
| boolean | isExistingNamespace(java.lang.String[] namespace) |
| boolean | isFunctionNamespace(java.lang.String[] namespace) |
| default org.apache.spark.sql.connector.catalog.Identifier[] | listFunctions(java.lang.String[] namespace) |
| java.lang.String[][] | listNamespaces() |
| java.lang.String[][] | listNamespaces(java.lang.String[] namespace) |
| org.apache.spark.sql.connector.catalog.Identifier[] | listTables(java.lang.String[] namespace) |
| org.apache.spark.sql.connector.catalog.functions.UnboundFunction | loadFunction(org.apache.spark.sql.connector.catalog.Identifier ident) |
| java.util.Map<java.lang.String,java.lang.String> | loadNamespaceMetadata(java.lang.String[] namespace) |
| Procedure | loadProcedure(org.apache.spark.sql.connector.catalog.Identifier ident) Load a stored procedure by identifier. |
| org.apache.spark.sql.connector.catalog.Table | loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
| org.apache.spark.sql.connector.catalog.Table | loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestamp) |
| org.apache.spark.sql.connector.catalog.Table | loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.lang.String version) |
| java.lang.String | name() |
| boolean | namespaceExists(java.lang.String[] namespace) |
| boolean | purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
| void | renameTable(org.apache.spark.sql.connector.catalog.Identifier from, org.apache.spark.sql.connector.catalog.Identifier to) |
| void | setDelegateCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin sparkSessionCatalog) |
| org.apache.spark.sql.connector.catalog.StagedTable | stageCreate(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) |
| org.apache.spark.sql.connector.catalog.StagedTable | stageCreateOrReplace(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) |
| org.apache.spark.sql.connector.catalog.StagedTable | stageReplace(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface org.apache.spark.sql.connector.catalog.TableCatalog: capabilities, createTable, tableExists, useNullableQuerySchema
protected org.apache.spark.sql.connector.catalog.TableCatalog buildSparkCatalog(java.lang.String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Build a SparkCatalog to be used for Iceberg operations. The default implementation creates a new SparkCatalog with the session catalog's name and options.
Parameters:
name - catalog name
options - catalog options
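Because buildSparkCatalog is protected, a subclass can override it to adjust how the Iceberg-side catalog is built before delegating to the default behavior. A minimal sketch, assuming the class lives in the org.apache.iceberg.spark package of your Iceberg version; the subclass name and the commented-out option key are hypothetical:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.spark.sql.connector.catalog.FunctionCatalog;
import org.apache.spark.sql.connector.catalog.SupportsNamespaces;
import org.apache.spark.sql.connector.catalog.TableCatalog;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

// Hypothetical subclass that inspects or tweaks the options before delegating to the
// default behavior, which creates a SparkCatalog with the session catalog's name and options.
public class CustomSessionCatalog<T extends TableCatalog & FunctionCatalog & SupportsNamespaces>
    extends org.apache.iceberg.spark.SparkSessionCatalog<T> {

  @Override
  protected TableCatalog buildSparkCatalog(String name, CaseInsensitiveStringMap options) {
    Map<String, String> adjusted = new HashMap<>(options);
    // adjusted.put("cache-enabled", "false"); // assumed option key, shown for illustration only
    return super.buildSparkCatalog(name, new CaseInsensitiveStringMap(adjusted));
  }
}
```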
public java.lang.String[] defaultNamespace()
Specified by: defaultNamespace in interface org.apache.spark.sql.connector.catalog.CatalogPlugin
public java.lang.String[][] listNamespaces() throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: listNamespaces in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public java.lang.String[][] listNamespaces(java.lang.String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: listNamespaces in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public boolean namespaceExists(java.lang.String[] namespace)
Specified by: namespaceExists in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces

public java.util.Map<java.lang.String,java.lang.String> loadNamespaceMetadata(java.lang.String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: loadNamespaceMetadata in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public void createNamespace(java.lang.String[] namespace, java.util.Map<java.lang.String,java.lang.String> metadata) throws org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
Specified by: createNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException

public void alterNamespace(java.lang.String[] namespace, org.apache.spark.sql.connector.catalog.NamespaceChange... changes) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: alterNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public boolean dropNamespace(java.lang.String[] namespace, boolean cascade) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException, org.apache.spark.sql.catalyst.analysis.NonEmptyNamespaceException
Specified by: dropNamespace in interface org.apache.spark.sql.connector.catalog.SupportsNamespaces
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
org.apache.spark.sql.catalyst.analysis.NonEmptyNamespaceException

public org.apache.spark.sql.connector.catalog.Identifier[] listTables(java.lang.String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: listTables in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.Table loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.spark.sql.connector.catalog.Table loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.lang.String version) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.spark.sql.connector.catalog.Table loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestamp) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
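The three loadTable overloads above cover plain loads plus version- and timestamp-based loads. A hedged sketch of calling them directly (the catalog lookup through sessionState().catalogManager() relies on Spark internals, the table identifier is hypothetical, and the meaning and units of the version string and timestamp follow Spark's TableCatalog contract for your Spark version):

```java
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.connector.catalog.Identifier;
import org.apache.spark.sql.connector.catalog.Table;
import org.apache.spark.sql.connector.catalog.TableCatalog;

public class LoadTableSketch {
  public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession.builder().getOrCreate();

    // Resolve the configured session catalog plugin and treat it as a TableCatalog.
    TableCatalog catalog =
        (TableCatalog) spark.sessionState().catalogManager().catalog("spark_catalog");

    Identifier ident = Identifier.of(new String[] {"db"}, "events"); // hypothetical table

    Table latest = catalog.loadTable(ident);                          // current table state
    Table byVersion = catalog.loadTable(ident, "1234567890");         // version string, e.g. a snapshot id
    Table byTimestamp = catalog.loadTable(ident, 1700000000000000L);  // timestamp; units per Spark's contract

    System.out.println(latest.name());
  }
}
```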
public void invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
Specified by: invalidateTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public org.apache.spark.sql.connector.catalog.Table createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException, org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: createTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.StagedTable stageCreate(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException, org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: stageCreate in interface org.apache.spark.sql.connector.catalog.StagingTableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.StagedTable stageReplace(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException, org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: stageReplace in interface org.apache.spark.sql.connector.catalog.StagingTableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public org.apache.spark.sql.connector.catalog.StagedTable stageCreateOrReplace(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map<java.lang.String,java.lang.String> properties) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: stageCreateOrReplace in interface org.apache.spark.sql.connector.catalog.StagingTableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.Table alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: alterTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public boolean dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
Specified by: dropTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public boolean purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident)
Specified by: purgeTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public void renameTable(org.apache.spark.sql.connector.catalog.Identifier from, org.apache.spark.sql.connector.catalog.Identifier to) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException, org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
Specified by: renameTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchTableException
org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
public final void initialize(java.lang.String name, org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Specified by: initialize in interface org.apache.spark.sql.connector.catalog.CatalogPlugin

public void setDelegateCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin sparkSessionCatalog)
Specified by: setDelegateCatalog in interface org.apache.spark.sql.connector.catalog.CatalogExtension

public java.lang.String name()
Specified by: name in interface org.apache.spark.sql.connector.catalog.CatalogPlugin
public Catalog icebergCatalog()
Description copied from interface: HasIcebergCatalog
Returns the underlying Catalog backing this Spark Catalog
Specified by: icebergCatalog in interface HasIcebergCatalog
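When direct access to the Iceberg API is needed (for example its own table existence checks or namespace operations), the underlying Catalog can be obtained as sketched below; the cast assumes the plugin registered as spark_catalog is this class, the catalog lookup relies on Spark internals, and the table identifier is hypothetical:

```java
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.spark.sql.SparkSession;

public class IcebergCatalogAccessSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder().getOrCreate();

    // Assumes "spark_catalog" was configured as org.apache.iceberg.spark.SparkSessionCatalog.
    org.apache.iceberg.spark.SparkSessionCatalog<?> sessionCatalog =
        (org.apache.iceberg.spark.SparkSessionCatalog<?>)
            spark.sessionState().catalogManager().catalog("spark_catalog");

    // The underlying Iceberg Catalog backing this Spark catalog.
    Catalog iceberg = sessionCatalog.icebergCatalog();
    boolean exists = iceberg.tableExists(TableIdentifier.of("db", "events")); // hypothetical table
    System.out.println("table exists: " + exists);
  }
}
```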
public org.apache.spark.sql.connector.catalog.functions.UnboundFunction loadFunction(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException
Specified by: loadFunction in interface org.apache.spark.sql.connector.catalog.FunctionCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException
public Procedure loadProcedure(org.apache.spark.sql.connector.catalog.Identifier ident) throws NoSuchProcedureException
Description copied from interface: ProcedureCatalog
Load a stored procedure by identifier.
Specified by: loadProcedure in interface ProcedureCatalog
Parameters:
ident - a stored procedure identifier
Throws:
NoSuchProcedureException - if there is no matching stored procedure
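In practice, the procedures resolved through loadProcedure are usually invoked with Spark SQL's CALL syntax rather than by calling this method directly. A hedged example (requires the Iceberg SQL extensions to be enabled; the procedure name, table, and snapshot id are illustrative):

```java
import org.apache.spark.sql.SparkSession;

public class CallProcedureSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        // The SQL extensions add CALL support; class name as documented by Iceberg.
        .config("spark.sql.extensions",
                "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
        .getOrCreate();

    // Spark resolves "system.rollback_to_snapshot" through ProcedureCatalog.loadProcedure
    // on the "spark_catalog" catalog and then invokes it; arguments are illustrative.
    spark.sql("CALL spark_catalog.system.rollback_to_snapshot('db.events', 123456789)").show();
    spark.stop();
  }
}
```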
public boolean isFunctionNamespace(java.lang.String[] namespace)

public boolean isExistingNamespace(java.lang.String[] namespace)
public org.apache.spark.sql.connector.catalog.Identifier[] listFunctions(java.lang.String[] namespace) throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: listFunctions in interface org.apache.spark.sql.connector.catalog.FunctionCatalog
Throws:
org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException