下面列出了 org.apache.hadoop.mapred.JobPriority 类的 API 实例代码及典型用法;也可以点击链接到 GitHub 查看完整源代码。
/**
 * Emits a {@link JobPriorityChangeEvent} if the parsed history line carries a
 * JOB_PRIORITY attribute; returns {@code null} otherwise.
 *
 * @param line      the parsed job-history line to inspect
 * @param jobIDName textual job id; may be {@code null}, in which case no
 *                  event is emitted
 * @param thatg     the emitter context (unused here)
 * @return the priority-change event, or {@code null} if not applicable
 */
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
HistoryEventEmitter thatg) {
// Guard BEFORE parsing: the original called JobID.forName(null) first,
// which defeats the null check and risks an NPE inside forName.
if (jobIDName == null) {
return null;
}
JobID jobID = JobID.forName(jobIDName);
String priority = line.get("JOB_PRIORITY");
if (priority != null) {
return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
return null;
}
/**
 * Emits a {@link JobPriorityChangeEvent} if the parsed history line carries a
 * JOB_PRIORITY attribute; returns {@code null} otherwise.
 *
 * @param line      the parsed job-history line to inspect
 * @param jobIDName textual job id; may be {@code null}, in which case no
 *                  event is emitted
 * @param thatg     the emitter context (unused here)
 * @return the priority-change event, or {@code null} if not applicable
 */
HistoryEvent maybeEmitEvent(ParsedLine line, String jobIDName,
HistoryEventEmitter thatg) {
// Guard BEFORE parsing: the original called JobID.forName(null) first,
// which defeats the null check and risks an NPE inside forName.
if (jobIDName == null) {
return null;
}
JobID jobID = JobID.forName(jobIDName);
String priority = line.get("JOB_PRIORITY");
if (priority != null) {
return new JobPriorityChangeEvent(jobID, JobPriority.valueOf(priority));
}
return null;
}
/**
 * Round-trip test for {@link JobPriorityChangeEvent}: the event must preserve
 * both the job id and the priority it was constructed with.
 *
 * @throws Exception if event construction fails
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
JobPriority.LOW);
// JUnit convention: expected value first, actual second — the original had
// them reversed, which produces misleading failure messages.
assertEquals(jid.toString(), test.getJobId().toString());
assertEquals(JobPriority.LOW, test.getPriority());
}
/**
 * Converts a YARN {@link JobReport} into a mapred {@link JobStatus}.
 *
 * @param jobreport   the YARN-side report to convert
 * @param trackingUrl URL of the job's tracking page
 * @return the equivalent mapred job status
 */
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
// YARN job reports carry no priority; default to NORMAL.
JobPriority priority = JobPriority.NORMAL;
JobStatus status = new org.apache.hadoop.mapred.JobStatus(
fromYarn(jobreport.getJobId()),
jobreport.getSetupProgress(),
jobreport.getMapProgress(),
jobreport.getReduceProgress(),
jobreport.getCleanupProgress(),
fromYarn(jobreport.getJobState()),
priority,
jobreport.getUser(),
jobreport.getJobName(),
jobreport.getJobFile(),
trackingUrl,
jobreport.isUber());
status.setStartTime(jobreport.getStartTime());
status.setFinishTime(jobreport.getFinishTime());
status.setFailureInfo(jobreport.getDiagnostics());
return status;
}
/**
 * Converts a YARN {@link ApplicationReport} into a mapred {@link JobStatus},
 * copying progress, timing, diagnostics, and (when present) resource usage.
 *
 * @param application the application report to convert
 * @param jobFile     path to the job configuration file
 * @return the equivalent mapred job status
 */
public static JobStatus fromYarn(ApplicationReport application,
String jobFile) {
String trackingUrl = application.getTrackingUrl();
if (trackingUrl == null) {
trackingUrl = "";
}
// Application reports carry no priority; default to NORMAL.
JobStatus status = new JobStatus(
TypeConverter.fromYarn(application.getApplicationId()),
0.0f, 0.0f, 0.0f, 0.0f,
TypeConverter.fromYarn(application.getYarnApplicationState(), application.getFinalApplicationStatus()),
org.apache.hadoop.mapreduce.JobPriority.NORMAL,
application.getUser(), application.getName(),
application.getQueue(), jobFile, trackingUrl, false
);
// The AM tracking URL doubles as the scheduling info string.
status.setSchedulingInfo(trackingUrl);
status.setStartTime(application.getStartTime());
status.setFinishTime(application.getFinishTime());
status.setFailureInfo(application.getDiagnostics());
ApplicationResourceUsageReport usage =
application.getApplicationResourceUsageReport();
if (usage == null) {
return status;
}
status.setNeededMem(usage.getNeededResources().getMemory());
status.setNumReservedSlots(usage.getNumReservedContainers());
status.setNumUsedSlots(usage.getNumUsedContainers());
status.setReservedMem(usage.getReservedResources().getMemory());
status.setUsedMem(usage.getUsedResources().getMemory());
return status;
}
/**
 * Creates an empty job info object; its fields are filled in afterwards as
 * the history file is parsed.
 */
public JobInfo() {
// Timestamps are unknown until parsed; -1 marks "not set".
submitTime = -1;
launchTime = -1;
finishTime = -1;
// All task counters start from zero.
totalMaps = 0;
totalReduces = 0;
failedMaps = 0;
failedReduces = 0;
finishedMaps = 0;
finishedReduces = 0;
// Identity / path fields default to empty strings rather than null.
username = "";
jobname = "";
jobConfPath = "";
jobQueueName = "";
tasksMap = new HashMap<TaskID, TaskInfo>();
completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
jobACLs = new HashMap<JobACL, AccessControlList>();
priority = JobPriority.NORMAL;
}
/**
 * Round-trip test for {@link JobPriorityChangeEvent}: the event must preserve
 * both the job id and the priority it was constructed with.
 *
 * @throws Exception if event construction fails
 */
@Test(timeout = 10000)
public void testJobPriorityChange() throws Exception {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
JobPriority.LOW);
// JUnit convention: expected value first, actual second — the original had
// them reversed, which produces misleading failure messages.
assertEquals(jid.toString(), test.getJobId().toString());
assertEquals(JobPriority.LOW, test.getPriority());
}
/**
 * Converts a YARN {@link JobReport} into a mapred {@link JobStatus}.
 *
 * @param jobreport   the YARN-side report to convert
 * @param trackingUrl URL of the job's tracking page
 * @return the equivalent mapred job status
 */
public static JobStatus fromYarn(JobReport jobreport, String trackingUrl) {
// YARN job reports carry no priority; default to NORMAL.
JobPriority priority = JobPriority.NORMAL;
JobStatus status = new org.apache.hadoop.mapred.JobStatus(
fromYarn(jobreport.getJobId()),
jobreport.getSetupProgress(),
jobreport.getMapProgress(),
jobreport.getReduceProgress(),
jobreport.getCleanupProgress(),
fromYarn(jobreport.getJobState()),
priority,
jobreport.getUser(),
jobreport.getJobName(),
jobreport.getJobFile(),
trackingUrl,
jobreport.isUber());
status.setStartTime(jobreport.getStartTime());
status.setFinishTime(jobreport.getFinishTime());
status.setFailureInfo(jobreport.getDiagnostics());
return status;
}
/**
 * Converts a YARN {@link ApplicationReport} into a mapred {@link JobStatus},
 * copying progress, timing, diagnostics, and (when present) resource usage.
 *
 * @param application the application report to convert
 * @param jobFile     path to the job configuration file
 * @return the equivalent mapred job status
 */
public static JobStatus fromYarn(ApplicationReport application,
String jobFile) {
String trackingUrl = application.getTrackingUrl();
if (trackingUrl == null) {
trackingUrl = "";
}
// Application reports carry no priority; default to NORMAL.
JobStatus status = new JobStatus(
TypeConverter.fromYarn(application.getApplicationId()),
0.0f, 0.0f, 0.0f, 0.0f,
TypeConverter.fromYarn(application.getYarnApplicationState(), application.getFinalApplicationStatus()),
org.apache.hadoop.mapreduce.JobPriority.NORMAL,
application.getUser(), application.getName(),
application.getQueue(), jobFile, trackingUrl, false
);
// The AM tracking URL doubles as the scheduling info string.
status.setSchedulingInfo(trackingUrl);
status.setStartTime(application.getStartTime());
status.setFinishTime(application.getFinishTime());
status.setFailureInfo(application.getDiagnostics());
ApplicationResourceUsageReport usage =
application.getApplicationResourceUsageReport();
if (usage == null) {
return status;
}
status.setNeededMem(usage.getNeededResources().getMemory());
status.setNumReservedSlots(usage.getNumReservedContainers());
status.setNumUsedSlots(usage.getNumUsedContainers());
status.setReservedMem(usage.getReservedResources().getMemory());
status.setUsedMem(usage.getUsedResources().getMemory());
return status;
}
/**
 * Creates an empty job info object; its fields are filled in afterwards as
 * the history file is parsed.
 */
public JobInfo() {
// Timestamps are unknown until parsed; -1 marks "not set".
submitTime = -1;
launchTime = -1;
finishTime = -1;
// All task counters start from zero.
totalMaps = 0;
totalReduces = 0;
failedMaps = 0;
failedReduces = 0;
finishedMaps = 0;
finishedReduces = 0;
// Identity / path fields default to empty strings rather than null.
username = "";
jobname = "";
jobConfPath = "";
jobQueueName = "";
tasksMap = new HashMap<TaskID, TaskInfo>();
completedTaskAttemptsMap = new HashMap<TaskAttemptID, TaskAttemptInfo>();
jobACLs = new HashMap<JobACL, AccessControlList>();
priority = JobPriority.NORMAL;
}
/**
 * Generates an event recording a change in a job's priority.
 *
 * @param id       id of the job whose priority changed
 * @param priority the job's new priority
 */
public JobPriorityChangeEvent(JobID id, JobPriority priority) {
// Both fields are stored as Avro Utf8 strings in the event datum.
datum.priority = new Utf8(priority.name());
datum.jobid = new Utf8(id.toString());
}
/** Returns the job priority recorded in this event. */
public JobPriority getPriority() {
String name = datum.priority.toString();
return JobPriority.valueOf(name);
}
/**
 * Generates an event recording a change in a job's priority.
 *
 * @param id       id of the job whose priority changed
 * @param priority the job's new priority
 */
public JobPriorityChangeEvent(JobID id, JobPriority priority) {
// Both fields are stored as Avro Utf8 strings in the event datum.
datum.priority = new Utf8(priority.name());
datum.jobid = new Utf8(id.toString());
}
/** Returns the job priority recorded in this event. */
public JobPriority getPriority() {
String name = datum.priority.toString();
return JobPriority.valueOf(name);
}