public class SparkCachedTableCatalog
extends java.lang.Object
implements org.apache.spark.sql.connector.catalog.TableCatalog
| Constructor and Description |
|---|
SparkCachedTableCatalog() |
| Modifier and Type | Method and Description |
|---|---|
SparkTable |
alterTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.connector.catalog.TableChange... changes) |
SparkTable |
createTable(org.apache.spark.sql.connector.catalog.Identifier ident,
org.apache.spark.sql.types.StructType schema,
org.apache.spark.sql.connector.expressions.Transform[] partitions,
java.util.Map<java.lang.String,java.lang.String> properties) |
boolean |
dropTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
void |
initialize(java.lang.String catalogName,
org.apache.spark.sql.util.CaseInsensitiveStringMap options) |
void |
invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
default boolean |
isExistingNamespace(java.lang.String[] namespace) |
default boolean |
isFunctionNamespace(java.lang.String[] namespace) |
default org.apache.spark.sql.connector.catalog.Identifier[] |
listFunctions(java.lang.String[] namespace) |
org.apache.spark.sql.connector.catalog.Identifier[] |
listTables(java.lang.String[] namespace) |
default org.apache.spark.sql.connector.catalog.functions.UnboundFunction |
loadFunction(org.apache.spark.sql.connector.catalog.Identifier ident) |
SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
long timestampMicros) |
SparkTable |
loadTable(org.apache.spark.sql.connector.catalog.Identifier ident,
java.lang.String version) |
java.lang.String |
name() |
boolean |
purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident) |
void |
renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent,
org.apache.spark.sql.connector.catalog.Identifier newIdent) |
Methods inherited from class java.lang.Object:
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait

Methods inherited from interface org.apache.spark.sql.connector.catalog.TableCatalog:
capabilities, createTable, tableExists, useNullableQuerySchema

public org.apache.spark.sql.connector.catalog.Identifier[] listTables(java.lang.String[] namespace)
Specified by: listTables in interface org.apache.spark.sql.connector.catalog.TableCatalog

public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, java.lang.String version) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public SparkTable loadTable(org.apache.spark.sql.connector.catalog.Identifier ident, long timestampMicros) throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException
Specified by: loadTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchTableException

public void invalidateTable(org.apache.spark.sql.connector.catalog.Identifier ident)
Specified by: invalidateTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public SparkTable createTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.types.StructType schema, org.apache.spark.sql.connector.expressions.Transform[] partitions, java.util.Map&lt;java.lang.String,java.lang.String&gt; properties) throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
Specified by: createTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException

public SparkTable alterTable(org.apache.spark.sql.connector.catalog.Identifier ident, org.apache.spark.sql.connector.catalog.TableChange... changes)
Specified by: alterTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public boolean dropTable(org.apache.spark.sql.connector.catalog.Identifier ident)
Specified by: dropTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public boolean purgeTable(org.apache.spark.sql.connector.catalog.Identifier ident)
throws java.lang.UnsupportedOperationException
Specified by: purgeTable in interface org.apache.spark.sql.connector.catalog.TableCatalog
Throws: java.lang.UnsupportedOperationException

public void renameTable(org.apache.spark.sql.connector.catalog.Identifier oldIdent,
org.apache.spark.sql.connector.catalog.Identifier newIdent)
Specified by: renameTable in interface org.apache.spark.sql.connector.catalog.TableCatalog

public void initialize(java.lang.String catalogName,
org.apache.spark.sql.util.CaseInsensitiveStringMap options)
Specified by: initialize in interface org.apache.spark.sql.connector.catalog.CatalogPlugin

public java.lang.String name()
Specified by: name in interface org.apache.spark.sql.connector.catalog.CatalogPlugin

public boolean isFunctionNamespace(java.lang.String[] namespace)
public boolean isExistingNamespace(java.lang.String[] namespace)
public org.apache.spark.sql.connector.catalog.Identifier[] listFunctions(java.lang.String[] namespace)
throws org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
Specified by: listFunctions in interface org.apache.spark.sql.connector.catalog.FunctionCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException

public org.apache.spark.sql.connector.catalog.functions.UnboundFunction loadFunction(org.apache.spark.sql.connector.catalog.Identifier ident)
throws org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException
Specified by: loadFunction in interface org.apache.spark.sql.connector.catalog.FunctionCatalog
Throws: org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException