public class BaseSnapshotTableSparkAction extends java.lang.Object implements SnapshotTable
Nested classes/interfaces inherited from interface SnapshotTable: SnapshotTable.Result

| Modifier and Type | Field and Description |
|---|---|
| protected static java.util.List<java.lang.String> | EXCLUDED_PROPERTIES |
| protected static java.lang.String | ICEBERG_METADATA_FOLDER |
| protected static java.lang.String | LOCATION |
| Constructor and Description |
|---|
| BaseSnapshotTableSparkAction(org.apache.spark.sql.SparkSession spark, org.apache.spark.sql.connector.catalog.CatalogPlugin sourceCatalog, org.apache.spark.sql.connector.catalog.Identifier sourceTableIdent, org.apache.spark.sql.connector.catalog.CatalogPlugin destCatalog, org.apache.spark.sql.connector.catalog.Identifier destTableIdent) |
| Modifier and Type | Method and Description |
|---|---|
| protected java.util.Map<java.lang.String,java.lang.String> | additionalProperties() |
| SnapshotTable | as(java.lang.String ident) Sets the table identifier for the newly created Iceberg table. |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | buildManifestFileDF(Table table) |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | buildManifestListDF(Table table) |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | buildOtherMetadataFileDF(Table table) |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | buildValidDataFileDF(Table table) |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | buildValidMetadataFileDF(Table table) |
| protected org.apache.spark.sql.connector.catalog.StagingTableCatalog | checkDestinationCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin catalog) |
| protected org.apache.spark.sql.connector.catalog.TableCatalog | checkSourceCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin catalog) |
| protected org.apache.spark.sql.connector.catalog.StagingTableCatalog | destCatalog() |
| protected org.apache.spark.sql.connector.catalog.Identifier | destTableIdent() |
| protected java.util.Map<java.lang.String,java.lang.String> | destTableProps() |
| protected void | ensureNameMappingPresent(Table table) |
| SnapshotTable.Result | execute() Executes this action. |
| protected java.lang.String | getMetadataLocation(Table table) |
| protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> | loadMetadataTable(Table table, MetadataTableType type) |
| protected JobGroupInfo | newJobGroupInfo(java.lang.String groupId, java.lang.String desc) |
| protected Table | newStaticTable(TableMetadata metadata, FileIO io) |
| ThisT | option(java.lang.String name, java.lang.String value) Configures this action with an extra option. |
| protected java.util.Map<java.lang.String,java.lang.String> | options() |
| ThisT | options(java.util.Map<java.lang.String,java.lang.String> newOptions) Configures this action with extra options. |
| protected SnapshotTable | self() |
| protected void | setProperties(java.util.Map<java.lang.String,java.lang.String> properties) |
| protected void | setProperty(java.lang.String key, java.lang.String value) |
| protected org.apache.spark.sql.connector.catalog.TableCatalog | sourceCatalog() |
| protected org.apache.spark.sql.connector.catalog.Identifier | sourceTableIdent() |
| protected java.lang.String | sourceTableLocation() |
| protected org.apache.spark.sql.SparkSession | spark() |
| protected org.apache.spark.api.java.JavaSparkContext | sparkContext() |
| protected StagedSparkTable | stageDestTable() |
| SnapshotTable | tableLocation(java.lang.String location) Sets the table location for the newly created Iceberg table. |
| SnapshotTable | tableProperties(java.util.Map<java.lang.String,java.lang.String> properties) Sets table properties in the newly created Iceberg table. |
| SnapshotTable | tableProperty(java.lang.String property, java.lang.String value) Sets a table property in the newly created Iceberg table. |
| protected org.apache.spark.sql.catalyst.catalog.CatalogTable | v1SourceTable() |
| protected <T> T | withJobGroupInfo(JobGroupInfo info, java.util.function.Supplier<T> supplier) |
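The public surface above mirrors the fluent SnapshotTable API: configure the destination with as(), tableLocation(), tableProperty()/tableProperties(), then call execute(). As a minimal sketch, assuming the SparkActions entry point and the Result accessor importedDataFilesCount() from the wider Iceberg Spark module (neither is documented on this page), a typical invocation might look like this; table identifiers and the property are illustrative:

```java
import org.apache.iceberg.actions.SnapshotTable;
import org.apache.iceberg.spark.actions.SparkActions;

public class SnapshotTableExample {
  public static void main(String[] args) {
    // Sketch only: SparkActions.get() assumes an active SparkSession with the
    // referenced catalogs configured; names and the property are placeholders.
    SnapshotTable.Result result = SparkActions.get()
        .snapshotTable("spark_catalog.db.source_table")     // existing source table
        .as("iceberg_catalog.db.source_table_snapshot")      // new Iceberg table
        .tableProperty("write.format.default", "parquet")    // optional table property
        .execute();

    // importedDataFilesCount() is assumed from the SnapshotTable.Result interface.
    System.out.println("Imported data files: " + result.importedDataFilesCount());
  }
}
```

The source table is left untouched; only new Iceberg metadata referencing its files is written for the snapshot table created by execute().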
protected static final java.lang.String LOCATION
protected static final java.lang.String ICEBERG_METADATA_FOLDER
protected static final java.util.List<java.lang.String> EXCLUDED_PROPERTIES
public BaseSnapshotTableSparkAction(org.apache.spark.sql.SparkSession spark,
org.apache.spark.sql.connector.catalog.CatalogPlugin sourceCatalog,
org.apache.spark.sql.connector.catalog.Identifier sourceTableIdent,
org.apache.spark.sql.connector.catalog.CatalogPlugin destCatalog,
org.apache.spark.sql.connector.catalog.Identifier destTableIdent)
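The constructor takes Spark's connector-catalog types directly. Below is a rough sketch of wiring it up by hand, assuming the usual org.apache.iceberg.spark.actions package layout; note that resolving CatalogPlugin instances via spark.sessionState().catalogManager() relies on Spark-internal API, and all catalog/table names are placeholders. In practice the action is normally obtained through a higher-level entry point rather than constructed directly.

```java
import org.apache.iceberg.spark.actions.BaseSnapshotTableSparkAction;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.connector.catalog.CatalogPlugin;
import org.apache.spark.sql.connector.catalog.Identifier;

public class ConstructSnapshotAction {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("snapshot-example")
        .getOrCreate();

    // Placeholder catalog names; both must be registered in the Spark session.
    CatalogPlugin sourceCatalog = spark.sessionState().catalogManager().catalog("spark_catalog");
    CatalogPlugin destCatalog = spark.sessionState().catalogManager().catalog("iceberg_catalog");

    Identifier sourceIdent = Identifier.of(new String[] {"db"}, "source_table");
    Identifier destIdent = Identifier.of(new String[] {"db"}, "source_table_snapshot");

    BaseSnapshotTableSparkAction action = new BaseSnapshotTableSparkAction(
        spark, sourceCatalog, sourceIdent, destCatalog, destIdent);

    action.execute();
  }
}
```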
protected SnapshotTable self()
protected org.apache.spark.sql.connector.catalog.StagingTableCatalog destCatalog()
protected org.apache.spark.sql.connector.catalog.Identifier destTableIdent()
public SnapshotTable as(java.lang.String ident)
Sets the table identifier for the newly created Iceberg table.
Specified by: as in interface SnapshotTable
Parameters: ident - the destination table identifier

public SnapshotTable tableProperties(java.util.Map<java.lang.String,java.lang.String> properties)
Sets table properties in the newly created Iceberg table.
Specified by: tableProperties in interface SnapshotTable
Parameters: properties - a map of properties to be included

public SnapshotTable tableProperty(java.lang.String property, java.lang.String value)
Sets a table property in the newly created Iceberg table.
Specified by: tableProperty in interface SnapshotTable
Parameters:
property - the key of the property to add
value - the value of the property to add

public SnapshotTable.Result execute()
Executes this action.
Specified by: execute in interface Action<SnapshotTable,SnapshotTable.Result>

protected java.util.Map<java.lang.String,java.lang.String> destTableProps()
protected org.apache.spark.sql.connector.catalog.TableCatalog checkSourceCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin catalog)
public SnapshotTable tableLocation(java.lang.String location)
Sets the table location for the newly created Iceberg table.
Specified by: tableLocation in interface SnapshotTable
Parameters: location - a table location

protected java.lang.String sourceTableLocation()
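Referring back to tableLocation() above: it overrides where the new Iceberg table's metadata is written; if it is not set, the action chooses a default location. A minimal sketch, assuming an already-configured SnapshotTable instance and an illustrative storage path:

```java
import org.apache.iceberg.actions.SnapshotTable;

class TableLocationExample {
  // "action" is a previously configured SnapshotTable; the path is illustrative.
  static SnapshotTable.Result snapshotToExplicitLocation(SnapshotTable action) {
    return action
        .tableLocation("s3://analytics-bucket/warehouse/db/source_table_snapshot")
        .execute();
  }
}
```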
protected org.apache.spark.sql.catalyst.catalog.CatalogTable v1SourceTable()
protected org.apache.spark.sql.connector.catalog.TableCatalog sourceCatalog()
protected org.apache.spark.sql.connector.catalog.Identifier sourceTableIdent()
protected void setProperties(java.util.Map<java.lang.String,java.lang.String> properties)
protected void setProperty(java.lang.String key,
java.lang.String value)
protected java.util.Map<java.lang.String,java.lang.String> additionalProperties()
protected org.apache.spark.sql.connector.catalog.StagingTableCatalog checkDestinationCatalog(org.apache.spark.sql.connector.catalog.CatalogPlugin catalog)
protected StagedSparkTable stageDestTable()
protected void ensureNameMappingPresent(Table table)
protected java.lang.String getMetadataLocation(Table table)
protected org.apache.spark.sql.SparkSession spark()
protected org.apache.spark.api.java.JavaSparkContext sparkContext()
public ThisT option(java.lang.String name,
java.lang.String value)
Description copied from interface: Action
Configures this action with an extra option. Certain actions allow users to control internal details of their execution via options.
public ThisT options(java.util.Map<java.lang.String,java.lang.String> newOptions)
Description copied from interface: Action
Configures this action with extra options. Certain actions allow users to control internal details of their execution via options.
protected java.util.Map<java.lang.String,java.lang.String> options()
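option() and options() pass free-form key/value settings through to the action, and the protected options() accessor exposes the merged map to subclasses. Which keys are honored depends on the concrete action, so the keys below are placeholders rather than documented options:

```java
import java.util.Map;
import org.apache.iceberg.actions.SnapshotTable;

class OptionsExample {
  // The option keys/values here are placeholders; consult the concrete
  // action's documentation for the keys it actually recognizes.
  static SnapshotTable.Result runWithOptions(SnapshotTable action) {
    return action
        .option("some-option", "some-value")
        .options(Map.of("another-option", "42"))
        .execute();
  }
}
```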
protected <T> T withJobGroupInfo(JobGroupInfo info, java.util.function.Supplier<T> supplier)
protected JobGroupInfo newJobGroupInfo(java.lang.String groupId, java.lang.String desc)
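newJobGroupInfo() and withJobGroupInfo() are protected helpers that let a subclass label the Spark jobs it triggers (so they are identifiable in the Spark UI) and run a unit of work under that label. A sketch of how a subclass might use them; the group id, description, counting logic, and the assumed org.apache.iceberg.spark.JobGroupInfo package are illustrative:

```java
import java.util.function.Supplier;
import org.apache.iceberg.Table;
import org.apache.iceberg.spark.JobGroupInfo;
import org.apache.iceberg.spark.actions.BaseSnapshotTableSparkAction;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.connector.catalog.CatalogPlugin;
import org.apache.spark.sql.connector.catalog.Identifier;

class LabeledSnapshotAction extends BaseSnapshotTableSparkAction {

  LabeledSnapshotAction(SparkSession spark,
                        CatalogPlugin sourceCatalog, Identifier sourceTableIdent,
                        CatalogPlugin destCatalog, Identifier destTableIdent) {
    super(spark, sourceCatalog, sourceTableIdent, destCatalog, destTableIdent);
  }

  // Counts the table's valid data files while tagging the resulting Spark
  // jobs with a descriptive job group (visible in the Spark UI).
  long countDataFiles(Table table) {
    JobGroupInfo info = newJobGroupInfo("SNAPSHOT-TABLE", "Counting valid data files");
    Supplier<Long> work = () -> buildValidDataFileDF(table).count();
    return withJobGroupInfo(info, work);
  }
}
```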
protected Table newStaticTable(TableMetadata metadata, FileIO io)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> buildValidDataFileDF(Table table)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> buildManifestFileDF(Table table)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> buildManifestListDF(Table table)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> buildOtherMetadataFileDF(Table table)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> buildValidMetadataFileDF(Table table)
protected org.apache.spark.sql.Dataset<org.apache.spark.sql.Row> loadMetadataTable(Table table, MetadataTableType type)
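The buildXxxDF() helpers each return a Dataset<Row> of file references reachable from the given table's metadata (data files, manifests, manifest lists, and other metadata files), and loadMetadataTable() reads one of the table's metadata tables directly. A sketch of combining their outputs into a single listing; it assumes the four datasets share a union-compatible schema, which this page does not guarantee:

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

class ReachableFiles {
  // Combines the per-category file listings produced by the buildXxxDF()
  // helpers into one dataset. Assumes all four inputs share the same schema.
  static Dataset<Row> unionAll(Dataset<Row> validDataFiles,
                               Dataset<Row> manifests,
                               Dataset<Row> manifestLists,
                               Dataset<Row> otherMetadataFiles) {
    return validDataFiles
        .union(manifests)
        .union(manifestLists)
        .union(otherMetadataFiles);
  }
}
```

A subclass with access to the protected helpers could call it as ReachableFiles.unionAll(buildValidDataFileDF(table), buildManifestFileDF(table), buildManifestListDF(table), buildOtherMetadataFileDF(table)).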