下面列出了 org.apache.hadoop.mapreduce.split.SplitMetaInfoReader API 类的实例代码及用法，也可以点击链接到 GitHub 查看源代码。
/**
 * Builds one {@code TaskLocationHint} per input split by reading the split
 * meta-info stored under the job submit directory.
 *
 * @param jobId        job whose split meta-info is read
 * @param fs           filesystem holding the submit directory
 * @param conf         configuration passed through to the meta-info reader
 * @param jobSubmitDir job submit directory, as a path string
 * @return a location hint per split, each wrapping that split's host set
 * @throws IOException if the split meta-info cannot be read
 */
private List<TaskLocationHint> getMapLocationHintsFromInputSplits(JobID jobId,
    FileSystem fs, Configuration conf,
    String jobSubmitDir) throws IOException {
  TaskSplitMetaInfo[] metaInfo =
      SplitMetaInfoReader.readSplitMetaInfo(jobId, fs, conf,
          new Path(jobSubmitDir));
  List<TaskLocationHint> hints =
      new ArrayList<TaskLocationHint>(metaInfo.length);
  for (TaskSplitMetaInfo split : metaInfo) {
    // Second constructor argument is intentionally null, as in the original.
    hints.add(new TaskLocationHint(
        new HashSet<String>(Arrays.asList(split.getLocations())), null));
  }
  return hints;
}
/**
 * Re-reads the split meta-info from {@code inputSplitsDir} and asserts that
 * the location hints rebuilt from it equal {@code actual}.
 *
 * @param inputSplitsDir directory containing the written split meta-info
 * @param actual         hints produced by the code under test
 * @throws Exception on any read failure
 */
private void verifyLocationHints(Path inputSplitsDir,
    List<TaskLocationHint> actual) throws Exception {
  JobID jobId = new JobID("dummy", 1);
  // NOTE(review): remoteFs and conf appear to be enclosing-class fields — confirm.
  TaskSplitMetaInfo[] metaInfo =
      SplitMetaInfoReader.readSplitMetaInfo(jobId, remoteFs,
          conf, inputSplitsDir);
  List<TaskLocationHint> expected =
      new ArrayList<TaskLocationHint>(metaInfo.length);
  for (TaskSplitMetaInfo split : metaInfo) {
    expected.add(new TaskLocationHint(
        new HashSet<String>(Arrays.asList(split.getLocations())), null));
  }
  Assert.assertEquals(expected, actual);
}
/**
 * Builds one {@code TaskLocationHint} per input split, using the
 * {@code TaskLocationHint.createTaskLocationHint} factory, from the split
 * meta-info stored under the job submit directory.
 *
 * @param jobId        job whose split meta-info is read
 * @param fs           filesystem holding the submit directory
 * @param conf         configuration passed through to the meta-info reader
 * @param jobSubmitDir job submit directory, as a path string
 * @return a location hint per split, each wrapping that split's host set
 * @throws IOException if the split meta-info cannot be read
 */
private List<TaskLocationHint> getMapLocationHintsFromInputSplits(JobID jobId,
    FileSystem fs, Configuration conf,
    String jobSubmitDir) throws IOException {
  TaskSplitMetaInfo[] metaInfo =
      SplitMetaInfoReader.readSplitMetaInfo(jobId, fs, conf,
          new Path(jobSubmitDir));
  List<TaskLocationHint> hints =
      new ArrayList<TaskLocationHint>(metaInfo.length);
  for (TaskSplitMetaInfo split : metaInfo) {
    // Second factory argument is intentionally null, as in the original.
    hints.add(TaskLocationHint.createTaskLocationHint(
        new HashSet<String>(Arrays.asList(split.getLocations())), null));
  }
  return hints;
}
/**
 * Re-reads the split meta-info from {@code inputSplitsDir} and asserts that
 * the location hints rebuilt via the {@code createTaskLocationHint} factory
 * equal {@code actual}.
 *
 * @param inputSplitsDir directory containing the written split meta-info
 * @param actual         hints produced by the code under test
 * @throws Exception on any read failure
 */
private void verifyLocationHints(Path inputSplitsDir,
    List<TaskLocationHint> actual) throws Exception {
  JobID jobId = new JobID("dummy", 1);
  // NOTE(review): remoteFs and conf appear to be enclosing-class fields — confirm.
  JobSplit.TaskSplitMetaInfo[] metaInfo =
      SplitMetaInfoReader.readSplitMetaInfo(jobId, remoteFs,
          conf, inputSplitsDir);
  List<TaskLocationHint> expected =
      new ArrayList<TaskLocationHint>(metaInfo.length);
  for (JobSplit.TaskSplitMetaInfo split : metaInfo) {
    expected.add(TaskLocationHint.createTaskLocationHint(
        new HashSet<String>(Arrays.asList(split.getLocations())), null));
  }
  Assert.assertEquals(expected, actual);
}
/**
 * Reads the task-split meta-info for the given job from its remote submit
 * directory.
 *
 * @param job   job providing the fields used for the read
 *              ({@code oldJobId}, {@code fs}, {@code conf},
 *              {@code remoteJobSubmitDir})
 * @param jobId not used by this implementation
 * @return the meta-info for every task split of the job
 * @throws YarnRuntimeException wrapping any {@link IOException} from the read
 */
protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
  try {
    return SplitMetaInfoReader.readSplitMetaInfo(
        job.oldJobId, job.fs,
        job.conf,
        job.remoteJobSubmitDir);
  } catch (IOException e) {
    // Preserve the cause so the original stack trace survives.
    throw new YarnRuntimeException(e);
  }
}
/**
 * Reads the task-split meta-info for the given job from its remote submit
 * directory, rethrowing any I/O failure as an unchecked exception.
 *
 * @param job   job providing the fields used for the read
 *              ({@code oldJobId}, {@code fs}, {@code conf},
 *              {@code remoteJobSubmitDir})
 * @param jobId not used by this implementation
 * @return the meta-info for every task split of the job
 * @throws YarnRuntimeException wrapping any {@link IOException} from the read
 */
protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
  final TaskSplitMetaInfo[] splitMetaInfo;
  try {
    splitMetaInfo = SplitMetaInfoReader.readSplitMetaInfo(
        job.oldJobId, job.fs, job.conf, job.remoteJobSubmitDir);
  } catch (IOException e) {
    // Wrap with the cause attached rather than swallowing it.
    throw new YarnRuntimeException(e);
  }
  return splitMetaInfo;
}