下面列出了 org.apache.hadoop.mapreduce.lib.input.CombineFileSplit#getStartOffsets() 的实例代码；也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
    throws IOException {
  // Unpack the combined split into its parallel per-file arrays:
  // paths, start offsets, and lengths.
  Configuration conf = context.getConfiguration();
  CombineFileSplit cSplit = (CombineFileSplit) split;
  Path[] path = cSplit.getPaths();
  long[] start = cSplit.getStartOffsets();
  long[] len = cSplit.getLengths();
  // Resolve the FileSystem from the first path. NOTE(review): assumes the
  // split is non-empty and all paths live on the same FileSystem — confirm
  // against the producing InputFormat.
  FileSystem fs = cSplit.getPath(0).getFileSystem(conf);
  // Time-range bounds for the iterator; both default to 0 when the
  // configuration keys are unset. Use uppercase 'L' suffix — lowercase 'l'
  // is easily misread as the digit '1'.
  long startTS = conf.getLong(RowInputFormat.START_TIME_MILLIS, 0L);
  long endTS = conf.getLong(RowInputFormat.END_TIME_MILLIS, 0L);
  this.splitIterator = HDFSSplitIterator.newInstance(fs, path, start, len, startTS, endTS);
  instantiateGfxdLoner(conf);
}
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
    throws IOException {
  // Extract per-file paths, start offsets, and lengths from the combine split.
  Configuration conf = context.getConfiguration();
  CombineFileSplit cSplit = (CombineFileSplit) split;
  Path[] path = cSplit.getPaths();
  long[] start = cSplit.getStartOffsets();
  long[] len = cSplit.getLengths();
  // FileSystem is resolved from path 0. NOTE(review): presumes a non-empty
  // split whose paths all share one FileSystem — verify with the InputFormat.
  FileSystem fs = cSplit.getPath(0).getFileSystem(conf);
  // Read optional time-range bounds (default 0 when unset). Fixed the long
  // literals to use 'L': lowercase 'l' reads like the digit '1'.
  long startTS = conf.getLong(RowInputFormat.START_TIME_MILLIS, 0L);
  long endTS = conf.getLong(RowInputFormat.END_TIME_MILLIS, 0L);
  this.splitIterator = HDFSSplitIterator.newInstance(fs, path, start, len, startTS, endTS);
  instantiateGfxdLoner(conf);
}
/**
 * Creates a queue over the input sources described by a combine split.
 *
 * @param split describes the input files, their start offsets, and lengths
 * @param conf  used to resolve {@code FileSystem} instances
 * @throws IOException propagated from {@code nextSource()}
 */
public FileQueue(CombineFileSplit split, Configuration conf)
    throws IOException {
  this.conf = conf;
  // Cache the split's parallel arrays, then open the first source.
  lengths = split.getLengths();
  startoffset = split.getStartOffsets();
  paths = split.getPaths();
  nextSource();
}
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Unpack the combine split's parallel arrays of paths, offsets, lengths.
  CombineFileSplit cSplit = (CombineFileSplit) split;
  Path[] path = cSplit.getPaths();
  long[] start = cSplit.getStartOffsets();
  long[] len = cSplit.getLengths();
  Configuration conf = context.getConfiguration();
  // NOTE(review): assumes a non-empty split with all paths on one FileSystem.
  FileSystem fs = cSplit.getPath(0).getFileSystem(conf);
  // No time-range filtering here: both bounds are passed as 0. Use the 'L'
  // suffix for long literals — lowercase 'l' is easily misread as '1'.
  this.splitIterator = HDFSSplitIterator.newInstance(fs, path, start, len, 0L, 0L);
}
/**
 * Constructs a file queue from the given combine split.
 *
 * @param split description of the input sources (paths, offsets, lengths)
 * @param conf  configuration used to resolve {@code FileSystem} instances
 * @throws IOException propagated from {@code nextSource()}
 */
public FileQueue(CombineFileSplit split, Configuration conf)
    throws IOException {
  this.conf = conf;
  // Snapshot the split's parallel arrays before advancing to the first source.
  startoffset = split.getStartOffsets();
  lengths = split.getLengths();
  paths = split.getPaths();
  nextSource();
}
/**
 * Captures the split's file layout and the job's parsing options.
 *
 * @param split   the combine split listing paths, lengths, and start offsets
 * @param context task context supplying the job {@link Configuration}
 */
public ParserPump(CombineFileSplit split, TaskAttemptContext context) {
  this.context = context;
  // Per-file layout from the split, plus its aggregate length.
  this.paths = split.getPaths();
  this.offsets = split.getStartOffsets();
  this.sizes = split.getLengths();
  this.size = split.getLength();
  // Parsing options pulled from the job configuration (all default false/null).
  final Configuration jobConf = context.getConfiguration();
  this.skipInvalid = jobConf.getBoolean(SKIP_INVALID_PROPERTY, false);
  this.verifyDataTypeValues = jobConf.getBoolean(VERIFY_DATATYPE_VALUES_PROPERTY, false);
  this.overrideRdfContext = jobConf.getBoolean(OVERRIDE_CONTEXT_PROPERTY, false);
  this.defaultRdfContextPattern = jobConf.get(DEFAULT_CONTEXT_PROPERTY);
  // Cap derived from the configured max split size (0 when the key is unset).
  this.maxSize = MAX_SINGLE_FILE_MULTIPLIER * jobConf.getLong("mapreduce.input.fileinputformat.split.maxsize", 0);
}
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Pull the per-file paths, start offsets, and lengths out of the split.
  CombineFileSplit cSplit = (CombineFileSplit) split;
  Path[] path = cSplit.getPaths();
  long[] start = cSplit.getStartOffsets();
  long[] len = cSplit.getLengths();
  Configuration conf = context.getConfiguration();
  // NOTE(review): resolves the FileSystem from path 0 — assumes a non-empty
  // split whose paths all share one FileSystem.
  FileSystem fs = cSplit.getPath(0).getFileSystem(conf);
  // Both time bounds are 0 (no time-range filter). Long literals use the
  // uppercase 'L' suffix; lowercase 'l' is easily confused with the digit '1'.
  this.splitIterator = HDFSSplitIterator.newInstance(fs, path, start, len, 0L, 0L);
}