TableOperations ops
Table table
java.lang.String name
TableOperations ops
Table table
java.lang.String name
TableOperations ops
Table table
java.lang.String name
FileScanTask[] tasks
java.lang.Object writeReplace()
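The `writeReplace()` entry above is Java's hook for substituting a different object into the serialization stream, typically a compact proxy that resolves back into a live instance on the other side. A minimal sketch of that proxy idiom, with hypothetical names (the general pattern only, not Iceberg's actual implementation):

```java
import java.io.ObjectStreamException;
import java.io.Serializable;

// Hypothetical example of the writeReplace/readResolve proxy idiom.
public class ScanTaskExample implements Serializable {
    private final String filePath;

    ScanTaskExample(String filePath) {
        this.filePath = filePath;
    }

    // The stream writes the returned proxy instead of this object.
    private Object writeReplace() throws ObjectStreamException {
        return new Proxy(filePath);
    }

    private static class Proxy implements Serializable {
        private final String filePath;

        Proxy(String filePath) {
            this.filePath = filePath;
        }

        // On read, the proxy resolves back into a full task.
        private Object readResolve() throws ObjectStreamException {
            return new ScanTaskExample(filePath);
        }
    }
}
```

The benefit of the proxy is that only its fields define the wire format, so the outer class is free to hold state that does not serialize cleanly.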
TableOperations ops
java.lang.String name
TableOperations ops
Table table
java.lang.String name
int[] fromProjectionPos
InputFile file
java.lang.String manifestPath
java.lang.Long length
int specId
ManifestContent content
long sequenceNumber
long minSequenceNumber
java.lang.Long snapshotId
java.lang.Integer addedFilesCount
java.lang.Integer existingFilesCount
java.lang.Integer deletedFilesCount
java.lang.Long addedRowsCount
java.lang.Long existingRowsCount
java.lang.Long deletedRowsCount
java.util.List<E> partitions
int[] fromProjectionPos
boolean containsNull
byte[] lowerBound
byte[] upperBound
TableOperations ops
Table table
java.lang.String name
TableOperations ops
Table table
java.lang.String name
java.lang.String partition
TableOperations ops
Table table
PartitionSpec spec
java.lang.String name
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException - On serialization error
java.lang.ClassNotFoundException - If the class is not found
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException - On serialization error
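The `readObject`/`writeObject` pair above is Java's hook for customizing a class's serialized form. A minimal sketch of the idiom with hypothetical names (not the actual Iceberg code): transient state is captured before the default write and rebuilt after the default read.

```java
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Hypothetical example of a custom readObject/writeObject pair.
public class MetricsExample implements Serializable {
    private transient StringBuilder scratch; // not serializable as-is
    private String value;

    private void writeObject(ObjectOutputStream out) throws IOException {
        value = scratch.toString(); // capture transient state first
        out.defaultWriteObject();   // then write the regular fields
    }

    private void readObject(ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        in.defaultReadObject();              // restore regular fields
        scratch = new StringBuilder(value);  // rebuild transient state
    }
}
```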
java.lang.Long rowCount
java.util.Map<K,V> columnSizes
java.util.Map<K,V> valueCounts
java.util.Map<K,V> nullValueCounts
java.util.Map<K,V> nanValueCounts
java.util.Map<K,V> lowerBounds
java.util.Map<K,V> upperBounds
java.util.Map<K,V> columnModes
MetricsModes.MetricsMode defaultMode
int length
Schema schema
int specId
PartitionField[] fields
int lastAssignedFieldId
TableOperations ops
Table table
Schema schema
java.lang.String name
Types.StructType struct
TableOperations ops
Table table
java.lang.String name
Transform<S,T> transform
int sourceId
SortDirection direction
NullOrder nullOrder
java.lang.String metadataFileLocation
int formatVersion
java.lang.String uuid
java.lang.String location
long lastSequenceNumber
long lastUpdatedMillis
int lastColumnId
Schema schema
int defaultSpecId
java.util.List<E> specs
int defaultSortOrderId
java.util.List<E> sortOrders
java.util.Map<K,V> properties
long currentSnapshotId
java.util.List<E> snapshots
java.util.Map<K,V> snapshotsById
java.util.Map<K,V> specsById
java.util.Map<K,V> sortOrdersById
java.util.List<E> snapshotLog
java.util.List<E> previousFiles
long timestampMillis
long snapshotId
java.lang.String roleArn
java.lang.String externalId
int timeout
java.lang.String region
java.lang.String s3FileIoSseType
java.lang.String s3FileIoSseKey
java.lang.String s3FileIoSseMd5
int s3FileIoMultipartUploadThreads
int s3FileIoMultiPartSize
double s3FileIoMultipartThresholdFactor
java.lang.String s3fileIoStagingDirectory
software.amazon.awssdk.services.s3.model.ObjectCannedACL s3FileIoAcl
java.lang.String glueCatalogId
boolean glueCatalogSkipArchive
SerializableSupplier<T> s3
AwsProperties awsProperties
AwsClientFactory awsClientFactory
Expression left
Expression right
Types.NestedField field
Accessor<T> accessor
java.util.Set<E> literalSet
BoundReference<T> ref
Transform<S,T> transform
Expression expr
java.lang.Object writeReplace() throws java.io.ObjectStreamException
java.io.ObjectStreamException
java.lang.String name
Expression child
Expression left
Expression right
Expression.Operation op
Term term
PartitionSpec spec
Expression expr
boolean caseSensitive
java.lang.Object writeReplace() throws java.io.ObjectStreamException
java.io.ObjectStreamException
java.util.List<E> literals
NamedReference<T> ref
Transform<S,T> transform
SerializableConfiguration hadoopConf
java.util.Map<K,V> properties
java.lang.String name
java.lang.String impl
java.lang.String catalogName
SerializableConfiguration hadoopConf
java.lang.String warehouseLocation
java.util.Map<K,V> properties
java.lang.String catalogName
SerializableConfiguration hadoopConf
java.lang.String uri
java.lang.String warehouse
int clientPoolSize
java.util.Map<K,V> properties
CatalogLoader catalogLoader
java.lang.String identifier
java.lang.String location
SerializableConfiguration hadoopConf
Schema schema
org.apache.flink.table.types.logical.RowType flinkSchema
java.util.Map<K,V> props
PartitionSpec spec
int[] equalityFieldIds
Schema eqDeleteRowSchema
Schema posDeleteRowSchema
org.apache.flink.table.types.logical.RowType eqDeleteFlinkSchema
org.apache.flink.table.types.logical.RowType posDeleteFlinkSchema
Schema schema
org.apache.flink.table.types.logical.RowType flinkSchema
PartitionSpec spec
LocationProvider locations
FileIO io
EncryptionManager encryptionManager
long targetFileSizeBytes
FileFormat format
java.util.List<E> equalityFieldIds
FileAppenderFactory<T> appenderFactory
TableLoader tableLoader
Schema tableSchema
FileIO io
EncryptionManager encryption
org.apache.iceberg.flink.source.ScanContext context
int splitNumber
CombinedScanTask task
TaskWriter<T> writer
int subTaskId
int attemptId
Schema schema
java.lang.String nameMapping
FileIO io
boolean caseSensitive
EncryptionManager encryptionManager
TaskWriterFactory<T> taskWriterFactory
TableLoader tableLoader
org.apache.iceberg.flink.source.ScanContext scanContext
boolean isRunning
long lastSnapshotId
org.apache.flink.streaming.api.operators.MailboxExecutor executor
FlinkInputFormat format
SerializableSupplier<T> hadoopConf
private void readObject(java.io.ObjectInputStream in) throws java.lang.ClassNotFoundException, java.io.IOException
java.lang.ClassNotFoundException
java.io.IOException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException
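The `SerializableConfiguration` and `SerializableSupplier<T> hadoopConf` fields in this section exist because Hadoop's `Configuration` is `Writable` but not `Serializable`, so it has to be persisted by hand inside hooks like the `readObject`/`writeObject` pair above. A plausible sketch of such a wrapper (hypothetical class; Spark's own `SerializableConfiguration` takes a similar approach):

```java
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.apache.hadoop.conf.Configuration;

// Hypothetical wrapper: persist a non-Serializable Hadoop Configuration
// through its Writable interface.
public class ConfWrapperExample implements Serializable {
    private transient Configuration conf;

    ConfWrapperExample(Configuration conf) {
        this.conf = conf;
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
        conf.write(out); // ObjectOutputStream is a DataOutput
    }

    private void readObject(ObjectInputStream in)
            throws IOException, ClassNotFoundException {
        in.defaultReadObject();
        conf = new Configuration(false);
        conf.readFields(in); // ObjectInputStream is a DataInput
    }
}
```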
DataFile[] dataFiles
DeleteFile[] deleteFiles
java.lang.CharSequence[] referencedDataFiles
java.util.Set<E> names
java.lang.Integer id
MappedFields nestedMapping
java.util.List<E> fields
MappedFields mapping
java.util.Map<K,V> values
java.lang.String uri
java.lang.String format
Schema schema
PartitionSpec spec
java.util.Map<K,V> properties
FileFormat format
org.apache.spark.broadcast.Broadcast<T> io
org.apache.spark.broadcast.Broadcast<T> encryptionManager
LocationProvider locations
java.lang.String nameMapping
boolean caseSensitive
DataFile[] taskFiles
Type sourceType
java.lang.String transform
java.lang.Object writeReplace() throws java.io.ObjectStreamException
java.io.ObjectStreamException
int scale
int precision
int length
Types.NestedField elementField
Types.NestedField keyField
Types.NestedField valueField
boolean isOptional
int id
java.lang.String name
Type type
java.lang.String doc
Types.NestedField[] fields
boolean adjustToUTC
java.util.Set<E> wrapperSet
java.lang.CharSequence wrapped
org.apache.avro.Schema schema
java.lang.Object first
java.lang.Object second
org.apache.spark.sql.internal.SQLConf conf
org.apache.spark.sql.catalyst.analysis.AssignmentAlignmentSupport.ColumnUpdate$ ColumnUpdate$module
private java.lang.Object readResolve()
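The `private java.lang.Object readResolve()` entries that recur through this Spark Catalyst section are the standard hook for keeping a canonical instance canonical across deserialization; the Scala compiler emits one for serializable `object`s, for example. A minimal Java sketch of the idiom, with a hypothetical name:

```java
import java.io.ObjectStreamException;
import java.io.Serializable;

// Hypothetical singleton rule: without readResolve, deserialization
// would mint a second instance and break identity checks.
public final class RuleExample implements Serializable {
    public static final RuleExample INSTANCE = new RuleExample();

    private RuleExample() {
    }

    // Swap the freshly deserialized copy for the canonical instance.
    private Object readResolve() throws ObjectStreamException {
        return INSTANCE;
    }
}
```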
scala.Option<A> defaultDatabase
int nestedViewDepth
private java.lang.Object readResolve()
java.lang.ThreadLocal<T> value
scala.collection.Seq<A> ref
org.apache.spark.sql.catalyst.expressions.Expression expr
private java.lang.Object readResolve()
int ordinal
org.apache.spark.sql.types.DataType dataType
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression child
scala.collection.Seq<A> names
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String db
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.CatalogPlugin catalog
scala.collection.Seq<A> namespace
private java.lang.Object readResolve()
scala.collection.Seq<A> expressions
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.TableCatalog catalog
org.apache.spark.sql.connector.catalog.Identifier identifier
org.apache.spark.sql.connector.catalog.Table table
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.Identifier identifier
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
scala.Function1<T1,R> org$apache$spark$sql$catalyst$analysis$ResolveLambdaVariables$$canonicalizer
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.CatalogManager catalogManager
org.apache.spark.sql.SparkSession spark
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndMultipartIdentifier$ CatalogAndMultipartIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.SessionCatalogAndIdentifier$ SessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.NonSessionCatalogAndIdentifier$ NonSessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndNamespace$ CatalogAndNamespace$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndIdentifier$ CatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsTableIdentifier$ AsTableIdentifier$module
boolean bitmap$0
private java.lang.Object readResolve()
scala.collection.Seq<A> args
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
scala.PartialFunction<A,B> transformTimeZoneExprs
private java.lang.Object readResolve()
java.lang.String message
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
private java.lang.Object readResolve()
org.apache.spark.sql.internal.SQLConf conf
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression child
scala.Option<A> aliasFunc
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
scala.collection.Seq<A> nameParts
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression deserializer
scala.collection.Seq<A> inputAttributes
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression child
org.apache.spark.sql.catalyst.expressions.Expression extraction
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.FunctionIdentifier name
scala.collection.Seq<A> children
boolean isDistinct
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.FunctionIdentifier name
scala.collection.Seq<A> children
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression havingCondition
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
scala.collection.Seq<A> names
scala.collection.Seq<A> rows
boolean expressionsResolved
boolean resolved
byte bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
boolean bitmap$0
private java.lang.Object readResolve()
int ordinal
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String regexPattern
scala.Option<A> table
boolean caseSensitive
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.TableIdentifier tableIdentifier
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
scala.Option<A> target
private java.lang.Object readResolve()
scala.collection.Seq<A> outputColumnNames
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String functionName
scala.collection.Seq<A> functionArgs
scala.collection.Seq<A> outputNames
boolean resolved
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> originalNameParts
org.apache.spark.sql.connector.catalog.TableCatalog catalog
org.apache.spark.sql.connector.catalog.Identifier tableName
boolean bitmap$0
private java.lang.Object readResolve()
private java.lang.Object readResolve()