Package org.apache.iceberg.spark.source
Class SparkMicroBatchStream
java.lang.Object
org.apache.iceberg.spark.source.SparkMicroBatchStream
- All Implemented Interfaces:
org.apache.spark.sql.connector.read.streaming.MicroBatchStream,
org.apache.spark.sql.connector.read.streaming.SparkDataStream,
org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
public class SparkMicroBatchStream
extends Object
implements org.apache.spark.sql.connector.read.streaming.MicroBatchStream, org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-
Method Summary
Modifier and Type | Method | Description
void | commit(org.apache.spark.sql.connector.read.streaming.Offset end)
org.apache.spark.sql.connector.read.PartitionReaderFactory | createReaderFactory()
org.apache.spark.sql.connector.read.streaming.Offset | deserializeOffset(String json)
org.apache.spark.sql.connector.read.streaming.ReadLimit | getDefaultReadLimit()
org.apache.spark.sql.connector.read.streaming.Offset | initialOffset()
org.apache.spark.sql.connector.read.streaming.Offset | latestOffset()
org.apache.spark.sql.connector.read.streaming.Offset | latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)
org.apache.spark.sql.connector.read.InputPartition[] | planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)
void | stop()
Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
reportLatestOffset
-
Method Details
-
latestOffset
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset()
- Specified by:
latestOffset in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
planInputPartitions
public org.apache.spark.sql.connector.read.InputPartition[] planInputPartitions(org.apache.spark.sql.connector.read.streaming.Offset start, org.apache.spark.sql.connector.read.streaming.Offset end)
- Specified by:
planInputPartitions in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
createReaderFactory
public org.apache.spark.sql.connector.read.PartitionReaderFactory createReaderFactory()
- Specified by:
createReaderFactory in interface org.apache.spark.sql.connector.read.streaming.MicroBatchStream
-
initialOffset
public org.apache.spark.sql.connector.read.streaming.Offset initialOffset()
- Specified by:
initialOffset in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
deserializeOffset
public org.apache.spark.sql.connector.read.streaming.Offset deserializeOffset(String json)
- Specified by:
deserializeOffset in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
commit
public void commit(org.apache.spark.sql.connector.read.streaming.Offset end)
- Specified by:
commit in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
stop
public void stop()
- Specified by:
stop in interface org.apache.spark.sql.connector.read.streaming.SparkDataStream
-
latestOffset
public org.apache.spark.sql.connector.read.streaming.Offset latestOffset(org.apache.spark.sql.connector.read.streaming.Offset startOffset, org.apache.spark.sql.connector.read.streaming.ReadLimit limit)
- Specified by:
latestOffset in interface org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-
getDefaultReadLimit
public org.apache.spark.sql.connector.read.streaming.ReadLimit getDefaultReadLimit()
- Specified by:
getDefaultReadLimit in interface org.apache.spark.sql.connector.read.streaming.SupportsAdmissionControl
-