org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression conditionEvaluator
org.apache.hadoop.hive.ql.plan.VectorGroupByDesc vectorDesc
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression[] aggregators
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] keyExpressions
int outputKeyLength
boolean isVectorOutput
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vOutContext
float memoryThreshold
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vOutContext
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] keyExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] bigTableFilterExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] bigTableValueExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter[] rowWriters
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
org.apache.hadoop.hive.serde2.Deserializer currentPartDeserializer
org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector currentPartRawRowObjectInspector
org.apache.hadoop.hive.ql.exec.vector.VectorAssignRow currentVectorAssign
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
org.apache.hadoop.hive.ql.plan.VectorSelectDesc vectorDesc
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] vExpressions
int[] projectedOutputColumns
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vOutContext
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] bigTableValueExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] bigTableFilterExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression[] keyExpressions
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter[] keyOutputWriters
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vOutContext
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
org.apache.hadoop.hive.ql.exec.vector.VectorizationContext vContext
FileScanTask[] tasks
java.lang.Object writeReplace()
TableOperations ops
java.lang.String name
java.lang.Object writeReplace()
int[] fromProjectionPos
InputFile file
java.lang.String manifestPath
java.lang.Long length
int specId
ManifestContent content
long sequenceNumber
long minSequenceNumber
java.lang.Long snapshotId
java.lang.Integer addedFilesCount
java.lang.Integer existingFilesCount
java.lang.Integer deletedFilesCount
java.lang.Long addedRowsCount
java.lang.Long existingRowsCount
java.lang.Long deletedRowsCount
ManifestFile.PartitionFieldSummary[] partitions
byte[] keyMetadata
int[] fromProjectionPos
boolean containsNull
java.lang.Boolean containsNaN
byte[] lowerBound
byte[] upperBound
java.lang.String partition
PartitionSpec spec
PartitionSpec spec
Schema schema
int lastColumnId
Snapshot snapshot
SortOrder sortOrder
java.lang.String uuid
java.util.Set<E> removed
long snapshotId
int schemaId
java.lang.Long snapshotId
int specId
int sortOrderId
java.lang.String location
java.util.Map<K,V> updated
int formatVersion
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException - On serialization error
java.lang.ClassNotFoundException - If the class is not found
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException - On serialization error
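The readObject/writeObject pair above is Java's hook for customizing serialization: when these private methods are present, the JVM calls them instead of (or around) the default field-by-field mechanism. A minimal sketch of the idiom, using a hypothetical holder class (the name and fields are illustrative, not the library's actual implementation):

    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.io.Serializable;

    // Hypothetical example of the readObject/writeObject hooks.
    public class CountsHolder implements Serializable {
        private static final long serialVersionUID = 1L;

        private Long rowCount;                    // written by defaultWriteObject()
        private transient long[] cachedHistogram; // rebuilt on read, never written

        public CountsHolder(Long rowCount) {
            this.rowCount = rowCount;
        }

        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();             // write all non-transient fields
        }

        private void readObject(ObjectInputStream in)
                throws IOException, ClassNotFoundException {
            in.defaultReadObject();               // read all non-transient fields
            this.cachedHistogram = new long[0];   // reinitialize transient state
        }
    }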
java.lang.Long rowCount
java.util.Map<K,V> columnSizes
java.util.Map<K,V> valueCounts
java.util.Map<K,V> nullValueCounts
java.util.Map<K,V> nanValueCounts
java.util.Map<K,V> lowerBounds
java.util.Map<K,V> upperBounds
java.util.Map<K,V> columnModes
MetricsModes.MetricsMode defaultMode
int length
Schema schema
int specId
PartitionField[] fields
int lastAssignedFieldId
Schema schema
Types.StructType struct
int schemaId
int[] identifierFieldIds
java.lang.String name
java.lang.String location
java.lang.String metadataFileLocation
java.util.Map<K,V> properties
java.lang.String schemaAsJson
int defaultSpecId
java.util.Map<K,V> specAsJsonMap
java.lang.String sortOrderAsJson
FileIO io
EncryptionManager encryption
LocationProvider locationProvider
Transform<S,T> transform
int sourceId
SortDirection direction
NullOrder nullOrder
java.lang.String metadataFileLocation
int formatVersion
java.lang.String uuid
java.lang.String location
long lastSequenceNumber
long lastUpdatedMillis
int lastColumnId
int currentSchemaId
java.util.List<E> schemas
int defaultSpecId
java.util.List<E> specs
int lastAssignedPartitionId
int defaultSortOrderId
java.util.List<E> sortOrders
java.util.Map<K,V> properties
long currentSnapshotId
java.util.List<E> snapshots
java.util.Map<K,V> snapshotsById
java.util.Map<K,V> schemasById
java.util.Map<K,V> specsById
java.util.Map<K,V> sortOrdersById
java.util.List<E> snapshotLog
java.util.List<E> previousFiles
java.util.List<E> changes
long timestampMillis
long snapshotId
int minInputFiles
int deleteFileThreshold
long minFileSize
long maxFileSize
long targetFileSize
long maxGroupSize
boolean rewriteAll
SortOrder sortOrder
java.lang.String ossEndpoint
java.lang.String accessKeyId
java.lang.String accessKeySecret
java.lang.String ossStagingDirectory
SerializableSupplier<T> oss
AliyunProperties aliyunProperties
java.util.concurrent.atomic.AtomicBoolean isResourceClosed
java.lang.String roleArn
java.lang.String externalId
int timeout
java.lang.String region
java.lang.String s3Endpoint
java.lang.String s3FileIoSseType
java.lang.String s3FileIoSseKey
java.lang.String s3FileIoSseMd5
int s3FileIoMultipartUploadThreads
int s3FileIoMultiPartSize
double s3FileIoMultipartThresholdFactor
java.lang.String s3fileIoStagingDirectory
software.amazon.awssdk.services.s3.model.ObjectCannedACL s3FileIoAcl
java.lang.String glueCatalogId
boolean glueCatalogSkipArchive
java.lang.String dynamoDbTableName
boolean isS3ChecksumEnabled
SerializableSupplier<T> s3
AwsProperties awsProperties
AwsClientFactory awsClientFactory
java.util.concurrent.atomic.AtomicBoolean isResourceClosed
Expression left
Expression right
Types.NestedField field
Accessor<T> accessor
java.util.Set<E> literalSet
BoundReference<T> ref
Transform<S,T> transform
Expression expr
java.lang.Object writeReplace() throws java.io.ObjectStreamException
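A writeReplace() that throws java.io.ObjectStreamException, as above, is the serialization-proxy hook: whatever the method returns is written to the stream in place of the original object. A minimal sketch under assumed names (ExamplePredicate and its Proxy are illustrative, not the library's classes):

    import java.io.ObjectStreamException;
    import java.io.Serializable;

    // Hypothetical sketch of the writeReplace() serialization-proxy pattern.
    public class ExamplePredicate implements Serializable {
        private final String fieldName;

        public ExamplePredicate(String fieldName) {
            this.fieldName = fieldName;
        }

        // Serialize a compact, self-contained proxy instead of this object.
        Object writeReplace() throws ObjectStreamException {
            return new Proxy(fieldName);
        }

        private static class Proxy implements Serializable {
            private final String fieldName;

            Proxy(String fieldName) {
                this.fieldName = fieldName;
            }

            // On deserialization, rebuild the real object from the proxy.
            Object readResolve() throws ObjectStreamException {
                return new ExamplePredicate(fieldName);
            }
        }
    }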
java.lang.String name
Expression child
Expression left
Expression right
Expression.Operation op
Term term
PartitionSpec spec
Expression expr
boolean caseSensitive
java.lang.Object writeReplace() throws java.io.ObjectStreamException
java.util.List<E> literals
NamedReference<T> ref
Transform<S,T> transform
SerializableConfiguration hadoopConf
java.util.Map<K,V> properties
java.lang.String name
java.lang.String impl
java.lang.String catalogName
SerializableConfiguration hadoopConf
java.lang.String warehouseLocation
java.util.Map<K,V> properties
java.lang.String catalogName
SerializableConfiguration hadoopConf
java.lang.String uri
java.lang.String warehouse
int clientPoolSize
java.util.Map<K,V> properties
CatalogLoader catalogLoader
java.lang.String identifier
java.lang.String location
SerializableConfiguration hadoopConf
Schema schema
org.apache.flink.table.types.logical.RowType flinkSchema
java.util.Map<K,V> props
PartitionSpec spec
int[] equalityFieldIds
Schema eqDeleteRowSchema
Schema posDeleteRowSchema
org.apache.flink.table.types.logical.RowType eqDeleteFlinkSchema
org.apache.flink.table.types.logical.RowType posDeleteFlinkSchema
Table table
Schema schema
org.apache.flink.table.types.logical.RowType flinkSchema
PartitionSpec spec
FileIO io
long targetFileSizeBytes
FileFormat format
java.util.List<E> equalityFieldIds
boolean upsert
FileAppenderFactory<T> appenderFactory
TableLoader tableLoader
FileIO io
EncryptionManager encryption
org.apache.iceberg.flink.source.ScanContext context
RowDataFileScanTaskReader rowDataReader
int splitNumber
CombinedScanTask task
TaskWriter<T> writer
int subTaskId
int attemptId
Schema schema
java.lang.String nameMapping
FileIO io
boolean caseSensitive
EncryptionManager encryptionManager
TaskWriterFactory<T> taskWriterFactory
RowDataFileScanTaskReader rowDataReader
TableLoader tableLoader
org.apache.iceberg.flink.source.ScanContext scanContext
boolean isRunning
long lastSnapshotId
org.apache.flink.api.common.operators.MailboxExecutor executor
FlinkInputFormat format
CombinedScanTask task
int fileOffset
long recordOffset
java.lang.String projectId
java.lang.String clientLibToken
java.lang.String serviceHost
java.lang.String gcsDecryptionKey
java.lang.String gcsEncryptionKey
java.lang.String gcsUserProject
java.lang.Integer gcsChannelReadChunkSize
java.lang.Integer gcsChannelWriteChunkSize
SerializableSupplier<T> storageSupplier
GCPProperties gcpProperties
java.util.concurrent.atomic.AtomicBoolean isResourceClosed
SerializableSupplier<T> hadoopConf
private void readObject(java.io.ObjectInputStream in) throws java.lang.ClassNotFoundException, java.io.IOException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
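Hand-written hooks like the pair above are typically needed when a field's type is not Serializable; Hadoop's Configuration (held here via a SerializableSupplier) is the classic case, since it is a Writable rather than a Serializable. A sketch under that assumption (the wrapper name is illustrative):

    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.io.Serializable;
    import org.apache.hadoop.conf.Configuration;

    // Hypothetical wrapper that serializes a Hadoop Configuration by hand.
    public class SerializableConf implements Serializable {
        private transient Configuration conf;   // not Serializable, so transient

        public SerializableConf(Configuration conf) {
            this.conf = conf;
        }

        public Configuration get() {
            return conf;
        }

        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
            conf.write(out);                    // Configuration implements Writable
        }

        private void readObject(ObjectInputStream in)
                throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            conf = new Configuration(false);
            conf.readFields(in);                // rebuild from the Writable form
        }
    }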
java.util.Map<K,V> ioInstances
java.util.Map<K,V> properties
SerializableSupplier<T> hadoopConf
DataFile[] dataFiles
DeleteFile[] deleteFiles
java.lang.CharSequence[] referencedDataFiles
java.util.Set<E> names
java.lang.Integer id
MappedFields nestedMapping
java.util.List<E> fields
MappedFields mapping
java.util.Map<K,V> values
java.lang.String uri
java.lang.String format
Table table
org.apache.spark.sql.SparkSession spark
FileScanTaskSetManager manager
FileRewriteCoordinator rewriteCoordinator
Table table
org.apache.spark.sql.SparkSession spark
FileScanTaskSetManager manager
FileRewriteCoordinator rewriteCoordinator
double sizeEstimateMultiple
org.apache.spark.broadcast.Broadcast<T> tableBroadcast
PartitionSpec spec
FileFormat format
boolean caseSensitive
DataFile[] dataFiles
DeleteFile[] deleteFiles
java.lang.CharSequence[] referencedDataFiles
DataFile[] taskFiles
Type sourceType
java.lang.String transform
java.lang.Object writeReplace() throws java.io.ObjectStreamException
int scale
int precision
int length
Types.NestedField elementField
Types.NestedField keyField
Types.NestedField valueField
boolean isOptional
int id
java.lang.String name
Type type
java.lang.String doc
Types.NestedField[] fields
boolean adjustToUTC
java.util.Set<E> wrapperSet
java.lang.CharSequence wrapped
org.apache.avro.Schema schema
java.lang.Object first
java.lang.Object second
java.util.Map<K,V> copiedMap
scala.collection.Seq<A> catalogAndNamespace
int nestedViewDepth
int maxNestedViewDepth
scala.collection.mutable.Map<K,V> relationCache
scala.collection.Seq<A> referredTempViewNames
scala.collection.Seq<A> referredTempFunctionNames
scala.Option<A> outerPlan
private java.lang.Object readResolve()
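The many private readResolve() entries in this section are the JVM's identity-preserving hook: the object it returns replaces the freshly deserialized instance, which is how singletons (including Scala's synthesized case objects, visible in the $module entries below) stay canonical across a round trip. A minimal Java sketch of the idiom (the class name is illustrative):

    import java.io.Serializable;

    // Hypothetical singleton demonstrating the readResolve() hook.
    public final class ExampleSingleton implements Serializable {
        public static final ExampleSingleton INSTANCE = new ExampleSingleton();

        private ExampleSingleton() {
        }

        // Deserialization would otherwise create a second instance; returning
        // the canonical one keeps reference equality valid after a round trip.
        private Object readResolve() {
            return INSTANCE;
        }
    }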
java.lang.ThreadLocal<T> value
org.apache.spark.sql.connector.catalog.CatalogManager catalogManager
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndMultipartIdentifier$ CatalogAndMultipartIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.SessionCatalogAndIdentifier$ SessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.NonSessionCatalogAndIdentifier$ NonSessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndNamespace$ CatalogAndNamespace$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndIdentifier$ CatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsTableIdentifier$ AsTableIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsFunctionIdentifier$ AsFunctionIdentifier$module
private java.lang.Object readResolve()
boolean resolved
int ordinal
org.apache.spark.sql.types.DataType dataType
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
java.lang.String viewName
java.lang.String colName
int ordinal
int expectedNumCandidates
scala.Option<A> viewDDL
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.expressions.Expression child
scala.collection.Seq<A> names
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String db
private java.lang.Object readResolve()
private java.lang.Object readResolve()
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan plan
int _hashCode
private java.lang.Object readResolve()
scala.collection.Seq<A> path
org.apache.spark.sql.types.StructField field
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition position
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.Identifier identifier
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.CatalogPlugin catalog
scala.collection.Seq<A> namespace
private java.lang.Object readResolve()
scala.collection.Seq<A> names
org.apache.spark.sql.catalyst.InternalRow ident
scala.Option<A> location
private java.lang.Object readResolve()
scala.collection.Seq<A> expressions
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.TableCatalog catalog
org.apache.spark.sql.connector.catalog.Identifier identifier
org.apache.spark.sql.connector.catalog.Table table
scala.collection.Seq<A> outputAttributes
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.Identifier identifier
boolean isTemp
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.CatalogManager catalogManager
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndMultipartIdentifier$ CatalogAndMultipartIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.SessionCatalogAndIdentifier$ SessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.NonSessionCatalogAndIdentifier$ NonSessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndNamespace$ CatalogAndNamespace$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndIdentifier$ CatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsTableIdentifier$ AsTableIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsFunctionIdentifier$ AsFunctionIdentifier$module
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.analysis.Analyzer analyzer
org.apache.spark.sql.SparkSession spark
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.connector.catalog.CatalogManager catalogManager
org.apache.spark.sql.SparkSession spark
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndMultipartIdentifier$ CatalogAndMultipartIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.SessionCatalogAndIdentifier$ SessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.NonSessionCatalogAndIdentifier$ NonSessionCatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndNamespace$ CatalogAndNamespace$module
org.apache.spark.sql.connector.catalog.LookupCatalog.CatalogAndIdentifier$ CatalogAndIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsTableIdentifier$ AsTableIdentifier$module
org.apache.spark.sql.connector.catalog.LookupCatalog.AsFunctionIdentifier$ AsFunctionIdentifier$module
boolean bitmap$0
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.catalog.SessionCatalog catalog
private java.lang.Object readResolve()
private java.lang.Object readResolve()
org.apache.spark.sql.catalyst.expressions.Expression canonicalized
org.apache.spark.sql.catalyst.expressions.Expression child
scala.collection.Seq<A> nameParts
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String message
private java.lang.Object readResolve()
scala.collection.Seq<A> rules
boolean resolved
org.apache.spark.sql.catalyst.expressions.Expression child
scala.Option<A> aliasFunc
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> nameParts
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.expressions.Expression deserializer
scala.collection.Seq<A> inputAttributes
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.expressions.Expression child
org.apache.spark.sql.catalyst.expressions.Expression extraction
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> name
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition position
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> nameParts
scala.collection.Seq<A> arguments
boolean isDistinct
scala.Option<A> filter
boolean ignoreNulls
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.FunctionIdentifier name
scala.collection.Seq<A> children
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.expressions.Expression havingCondition
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child
org.apache.spark.sql.catalyst.expressions.ExpressionSet validConstraints
byte bitmap$0
private java.lang.Object readResolve()
boolean expressionsResolved
boolean resolved
scala.collection.Seq<A> names
scala.collection.Seq<A> rows
byte bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
int ordinal
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.immutable.Map<K,V> spec
scala.Option<A> location
boolean bitmap$0
private java.lang.Object readResolve()
java.lang.String regexPattern
scala.Option<A> table
boolean caseSensitive
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
org.apache.spark.sql.util.CaseInsensitiveStringMap options
boolean isStreaming
scala.collection.Seq<A> nodePatterns
boolean bitmap$0
private java.lang.Object readResolve()
private java.lang.Object readResolve()
boolean resolved
boolean bitmap$0
scala.Option<A> target
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> outputColumnNames
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan child
scala.collection.Seq<A> nodePatterns
org.apache.spark.sql.catalyst.expressions.ExpressionSet validConstraints
byte bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
java.lang.String commandName
scala.Option<A> relationTypeMismatchHint
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
java.lang.String commandName
boolean allowTempView
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
org.apache.spark.sql.catalyst.FunctionIdentifier name
scala.collection.Seq<A> functionArgs
scala.collection.Seq<A> outputNames
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> originalNameParts
org.apache.spark.sql.connector.catalog.TableCatalog catalog
org.apache.spark.sql.connector.catalog.Identifier tableName
boolean bitmap$0
private java.lang.Object readResolve()
boolean resolved
scala.collection.Seq<A> multipartIdentifier
java.lang.String commandName
boolean allowTemp
scala.Option<A> relationTypeMismatchHint
boolean bitmap$0
private java.lang.Object readResolve()
private java.lang.Object readResolve()
scala.Option<A> command
org.apache.spark.sql.catalyst.trees.Origin start
org.apache.spark.sql.catalyst.trees.Origin stop
private java.lang.Object readResolve()