public abstract class HadoopWriteConfigUtil<K,V>
extends Object
implements scala.Serializable
Notes:
1. Implementations should throw IllegalArgumentException when the wrong Hadoop API is referenced;
2. Implementations must be serializable, as the instance instantiated on the driver
will be used for tasks on executors;
3. Implementations should have a constructor with exactly one argument:
(conf: SerializableConfiguration) or (conf: SerializableJobConf).
| Constructor and Description |
|---|
| HadoopWriteConfigUtil(scala.reflect.ClassTag&lt;V&gt; evidence$1) |
| Modifier and Type | Method and Description |
|---|---|
| abstract void | assertConf(org.apache.hadoop.mapreduce.JobContext jobContext, SparkConf conf) |
| abstract void | closeWriter(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) |
| abstract HadoopMapReduceCommitProtocol | createCommitter(int jobId) |
| abstract org.apache.hadoop.mapreduce.JobContext | createJobContext(String jobTrackerId, int jobId) |
| abstract org.apache.hadoop.mapreduce.TaskAttemptContext | createTaskAttemptContext(String jobTrackerId, int jobId, int splitId, int taskAttemptId) |
| abstract void | initOutputFormat(org.apache.hadoop.mapreduce.JobContext jobContext) |
| abstract void | initWriter(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext, int splitId) |
| abstract void | write(scala.Tuple2&lt;K,V&gt; pair) |
public HadoopWriteConfigUtil(scala.reflect.ClassTag<V> evidence$1)
public abstract org.apache.hadoop.mapreduce.JobContext createJobContext(String jobTrackerId, int jobId)
public abstract org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(String jobTrackerId, int jobId, int splitId, int taskAttemptId)
public abstract HadoopMapReduceCommitProtocol createCommitter(int jobId)
public abstract void initWriter(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext, int splitId)
public abstract void closeWriter(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
public abstract void initOutputFormat(org.apache.hadoop.mapreduce.JobContext jobContext)
public abstract void assertConf(org.apache.hadoop.mapreduce.JobContext jobContext, SparkConf conf)