Package org.apache.iceberg.spark.source
Class SparkStreamingWrite
java.lang.Object
  org.apache.iceberg.spark.source.SparkStreamingWrite

All Implemented Interfaces:
org.apache.spark.sql.connector.write.BatchWrite, org.apache.spark.sql.connector.write.streaming.StreamingWrite
public class SparkStreamingWrite extends java.lang.Object implements org.apache.spark.sql.connector.write.streaming.StreamingWrite
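SparkStreamingWrite is the Iceberg sink-side implementation of Spark's DataSource V2 streaming write API. The sketch below is illustrative only and is not part of this class: it shows the order in which a micro-batch engine is expected to drive a StreamingWrite, creating one writer factory up front and then committing or aborting the accumulated WriterCommitMessages for each epoch. The helper runEpoch and the direct calls are assumptions for illustration; in practice this coordination happens inside Spark.

    import org.apache.spark.sql.connector.write.PhysicalWriteInfo;
    import org.apache.spark.sql.connector.write.WriterCommitMessage;
    import org.apache.spark.sql.connector.write.streaming.StreamingDataWriterFactory;
    import org.apache.spark.sql.connector.write.streaming.StreamingWrite;

    class StreamingWriteLifecycleSketch {
      // Hypothetical driver-side helper; the real epoch handling lives inside Spark.
      static void runEpoch(StreamingWrite write, PhysicalWriteInfo info,
                           long epochId, WriterCommitMessage[] messages) {
        // Executors obtain per-partition writers from this factory and each returns
        // a WriterCommitMessage; here the collected messages are passed in directly.
        StreamingDataWriterFactory factory = write.createStreamingWriterFactory(info);
        try {
          write.commit(epochId, messages); // publish the epoch's files as one snapshot
        } catch (RuntimeException e) {
          write.abort(epochId, messages);  // clean up files written for the failed epoch
          throw e;
        }
      }
    }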
Method Summary (All Methods, Instance Methods, Concrete Methods)

void abort(long epochId, org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
void abort(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
void commit(long epochId, org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
void commit(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
protected void commitOperation(SnapshotUpdate<?> operation, int numFiles, java.lang.String description)
org.apache.iceberg.spark.source.SparkBatchWrite.WriterFactory createBatchWriterFactory(org.apache.spark.sql.connector.write.PhysicalWriteInfo info)
org.apache.spark.sql.connector.write.streaming.StreamingDataWriterFactory createStreamingWriterFactory(org.apache.spark.sql.connector.write.PhysicalWriteInfo info)
protected java.lang.Iterable<DataFile> files(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
protected FileFormat getFileFormat(java.util.Map<java.lang.String,java.lang.String> tableProperties, java.util.Map<java.lang.String,java.lang.String> options)
protected Table table()
java.lang.String toString()
Method Detail
-
createStreamingWriterFactory
public org.apache.spark.sql.connector.write.streaming.StreamingDataWriterFactory createStreamingWriterFactory(org.apache.spark.sql.connector.write.PhysicalWriteInfo info)
- Specified by: createStreamingWriterFactory in interface org.apache.spark.sql.connector.write.streaming.StreamingWrite
-
commit
public void commit(long epochId, org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
- Specified by: commit in interface org.apache.spark.sql.connector.write.streaming.StreamingWrite
-
abort
public void abort(long epochId, org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
- Specified by: abort in interface org.apache.spark.sql.connector.write.streaming.StreamingWrite
-
getFileFormat
protected FileFormat getFileFormat(java.util.Map<java.lang.String,java.lang.String> tableProperties, java.util.Map<java.lang.String,java.lang.String> options)
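The sketch below is not this class's implementation; it is a hedged illustration of how a lookup with this signature typically resolves the write file format, assuming Iceberg's table property write.format.default (defaulting to parquet) and a per-write option such as write-format that takes precedence over it.

    import java.util.Locale;
    import java.util.Map;
    import org.apache.iceberg.FileFormat;

    class FileFormatLookupSketch {
      // Illustrative only: the property and option keys below are assumptions.
      static FileFormat resolve(Map<String, String> tableProperties, Map<String, String> options) {
        String tableDefault = tableProperties.getOrDefault("write.format.default", "parquet");
        String name = options.getOrDefault("write-format", tableDefault);
        return FileFormat.valueOf(name.toUpperCase(Locale.ENGLISH));
      }
    }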
-
createBatchWriterFactory
public org.apache.iceberg.spark.source.SparkBatchWrite.WriterFactory createBatchWriterFactory(org.apache.spark.sql.connector.write.PhysicalWriteInfo info)
- Specified by: createBatchWriterFactory in interface org.apache.spark.sql.connector.write.BatchWrite
-
commit
public void commit(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
- Specified by: commit in interface org.apache.spark.sql.connector.write.BatchWrite
-
commitOperation
protected void commitOperation(SnapshotUpdate<?> operation, int numFiles, java.lang.String description)
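The following is a minimal sketch, not this method's actual body, of what committing a SnapshotUpdate-style operation looks like with the Iceberg table API: attach the written DataFiles to an append operation and commit it as a single snapshot. The helper name appendWrittenFiles is an assumption for illustration.

    import org.apache.iceberg.AppendFiles;
    import org.apache.iceberg.DataFile;
    import org.apache.iceberg.Table;

    class CommitOperationSketch {
      // Sketch only: appends the files produced by a write and commits one new snapshot.
      static void appendWrittenFiles(Table table, Iterable<DataFile> files) {
        AppendFiles append = table.newAppend();
        for (DataFile file : files) {
          append.appendFile(file);
        }
        append.commit(); // creates a single new table snapshot
      }
    }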
-
abort
public void abort(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
- Specified by: abort in interface org.apache.spark.sql.connector.write.BatchWrite
-
table
protected Table table()
-
files
protected java.lang.Iterable<DataFile> files(org.apache.spark.sql.connector.write.WriterCommitMessage[] messages)
-
toString
public java.lang.String toString()
- Overrides: toString in class java.lang.Object