org.quartz.JobDetail#setJobDataMap() - Source Code Examples

Listed below are example usages of org.quartz.JobDetail#setJobDataMap(), collected from the open-source projects named in each heading; the full source of each snippet can be found in the corresponding project's GitHub repository.
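Before the project examples, here is a minimal self-contained sketch of the pattern they all share, written against the Quartz 1.x API used throughout this page (the job class, keys and values are invented for illustration): a JobDataMap is filled with the parameters the job needs, attached to the JobDetail via setJobDataMap(), and read back inside the job through the JobExecutionContext.

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.Scheduler;
import org.quartz.SimpleTrigger;
import org.quartz.impl.StdSchedulerFactory;

public class SetJobDataMapDemo {

	// Hypothetical job that reads its parameters back from the JobDataMap.
	public static class ReportJob implements Job {
		public void execute(JobExecutionContext context) throws JobExecutionException {
			JobDataMap dataMap = context.getJobDetail().getJobDataMap();
			System.out.println("reportId=" + dataMap.getInt("reportId")
					+ ", recipient=" + dataMap.getString("recipient"));
		}
	}

	public static void main(String[] args) throws Exception {
		// Put everything the job will need at execution time into the map.
		JobDataMap dataMap = new JobDataMap();
		dataMap.put("reportId", 42);
		dataMap.put("recipient", "ops@example.com");

		// Quartz 1.x setter style, as in the examples below.
		JobDetail jobDetail = new JobDetail("demoJob", Scheduler.DEFAULT_GROUP, ReportJob.class);
		jobDetail.setJobDataMap(dataMap);

		Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
		scheduler.start();
		// SimpleTrigger(name, group) fires once, immediately.
		scheduler.scheduleJob(jobDetail, new SimpleTrigger("demoTrigger", Scheduler.DEFAULT_GROUP));
	}
}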

Example 1

/**
 * Builds the JobDetail for the cockpit data export job.
 *
 * @return the configured JobDetail
 */
public JobDetail build() {
	Assert.assertNotNull(documentExportConf.getDocumentId(), "Attribute documentId cannot be null");
	Assert.assertNotNull(documentExportConf.getDocumentLabel(), "Attribute document label cannot be null");
	Assert.assertNotNull(documentExportConf.getExportType(), "Attribute export type cannot be null");

	Assert.assertNotNull(userProfile, "Attribute userProfile cannot be null");

	String jobDescription = String.format("Export of dataset %d to %s", documentExportConf.getDocumentId(), documentExportConf.getExportType());

	JobDataMap jobDataMap = new JobDataMap();
	jobDataMap.put(CockpitDataExportConstans.DOC_EXP_CONF, documentExportConf);
	jobDataMap.put(CockpitDataExportConstans.LOCALE, getLocale());
	jobDataMap.put(CockpitDataExportConstans.USER_PROFILE, getUserProfile());
	jobDataMap.put(CockpitDataExportConstans.RESOURCE_PATH, resoursePath);
	jobDataMap.put(CockpitDataExportConstans.JOB_ID, randomUUID);

	JobDetail job = new JobDetail("export_" + randomUUID, "export", CockpitDataExportJob.class);
	job.setDescription(jobDescription);
	job.setJobDataMap(jobDataMap);
	job.setDurability(false);

	return job;
}
 
Example 2 - Project: AsuraFramework, File: JobDetailSupport.java
/**
 * Rebuilds a JobDetail from the given JMX CompositeData.
 *
 * @param cData the composite data describing the job
 * @return the reconstructed JobDetail
 */
public static JobDetail newJobDetail(CompositeData cData) {
	JobDetail jobDetail = new JobDetail();

	int i = 0;
	jobDetail.setName((String) cData.get(ITEM_NAMES[i++]));
	jobDetail.setGroup((String) cData.get(ITEM_NAMES[i++]));
	jobDetail.setDescription((String) cData.get(ITEM_NAMES[i++]));
	try {
		Class c = Class.forName((String) cData.get(ITEM_NAMES[i++]));
		jobDetail.setJobClass(c);
	} catch (ClassNotFoundException cnfe) {
		// job class is not on the classpath: leave jobClass unset
	}
	jobDetail.setJobDataMap(JobDataMapSupport
			.newJobDataMap((TabularData) cData.get(ITEM_NAMES[i++])));
	jobDetail.setVolatility((Boolean) cData.get(ITEM_NAMES[i++]));
	jobDetail.setDurability((Boolean) cData.get(ITEM_NAMES[i++]));
	jobDetail.setRequestsRecovery((Boolean) cData.get(ITEM_NAMES[i++]));

	return jobDetail;
}
 
Example 3 - Project: Lottery, File: CmsTaskAct.java
/**
 * Start scheduling a task.
 * @param task the task
 * @param taskCode the task name
 * @throws ParseException
 * @throws SchedulerException
 * @throws ClassNotFoundException
 */
private void startTask(CmsTask task,String taskCode) throws ParseException, SchedulerException, ClassNotFoundException{
	String cronExpress=manager.getCronExpressionFromDB(task.getId());
	System.out.println(cronExpress);
	if(cronExpress.indexOf("null")==-1){
		JobDetail jobDetail = new JobDetail();
		jobDetail.setName(taskCode);
		jobDetail.setGroup(Scheduler.DEFAULT_GROUP);
		jobDetail.setJobClass(getClassByTask(task.getJobClass()));
	// the job needs the task's attr attribute as its parameters
		jobDetail.setJobDataMap(getJobDataMap(task.getAttr()));
		CronTrigger cronTrigger = new CronTrigger("cron_" + taskCode,Scheduler.DEFAULT_GROUP, jobDetail.getName(),Scheduler.DEFAULT_GROUP);
		cronTrigger.setCronExpression(cronExpress);
		scheduler.scheduleJob(jobDetail, cronTrigger); 
	}
}
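The getJobDataMap(task.getAttr()) helper called above is not shown in this snippet. A minimal sketch of such a helper, assuming the task attributes arrive as a java.util.Map<String, String> (the real CmsTask#getAttr() type may differ), would sit in the same class:

private JobDataMap getJobDataMap(Map<String, String> attr) {
	JobDataMap jobDataMap = new JobDataMap();
	if (attr != null) {
		// each task attribute becomes one entry in the job data map
		jobDataMap.putAll(attr);
	}
	return jobDataMap;
}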
 
Example 4 - Project: webcurator, File: SchedulerUtil.java
/**
 * Schedule the harvest completion to run after a specified delay, to allow the
 * harvester to release all its resources, or after a failure to contact the
 * core or the digital asset store.
 * @param aHarvestName the name of the harvest job
 * @param aFailueStep the step that the completion failed at
 * @param aMessageSent a flag indicating that the failure notification has been sent
 * @param aRetries the number of retries attempted
 * @throws SchedulerException thrown if there is a problem scheduling the quartz job
 */
public static final void scheduleHarvestCompleteJob(String aHarvestName, int aFailueStep, boolean aMessageSent, int aRetries) throws SchedulerException {
	ApplicationContext context = ApplicationContextFactory.getWebApplicationContext();
	Scheduler scheduler = (Scheduler) context.getBean(Constants.BEAN_SCHEDULER_FACTORY);
	HarvestCompleteConfig hcc = (HarvestCompleteConfig) context.getBean(Constants.BEAN_HARVEST_COMPLETE_CONFIG);
    
       JobDetail job = new JobDetail(JOB_NAME_COMPLETE + SEPERATOR + aHarvestName + SEPERATOR + aRetries, JOB_GROUP_COMPLETE + SEPERATOR + aHarvestName, HarvestCompleteJob.class);
       JobDataMap jdm = new JobDataMap();
       jdm.put(HarvestCompleteJob.PARAM_JOB_NAME, aHarvestName);
       jdm.put(HarvestCompleteJob.PARAM_FAILURE_STEP, new Integer(aFailueStep));
       jdm.put(HarvestCompleteJob.PARAM_MSG_SENT, new Boolean(aMessageSent));
       jdm.put(HarvestCompleteJob.PARAM_RETRIES, new Integer(aRetries));
       job.setJobDataMap(jdm);

       // Set the complete job to run xx seconds after we get the notification
       Calendar cal = Calendar.getInstance();
       if (aRetries == 0) {
       	cal.add(Calendar.SECOND, hcc.getWaitOnCompleteSeconds());
       }
       else if (aRetries < hcc.getLevelRetryBand()) {
       	cal.add(Calendar.SECOND, hcc.getWaitOnFailureLevelOneSecs());
       }
       else {
       	cal.add(Calendar.SECOND, hcc.getWaitOnFailureLevelTwoSecs());
       }
               
       Trigger trigger = new SimpleTrigger(TRG_NAME_COMPLETE + SEPERATOR + aHarvestName + SEPERATOR + aRetries, TRG_GROUP_COMPLETE + SEPERATOR + aHarvestName, cal.getTime());                       
       scheduler.scheduleJob(job, trigger);
}
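On the consuming side, a job such as HarvestCompleteJob reads these entries back from the JobDataMap inside execute(). The following is only a sketch, not the webcurator source: the key constants mirror the ones used above, but their string values and the class body are invented.

import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class HarvestCompleteJob implements Job {
	// placeholder values; the real constants live in webcurator's HarvestCompleteJob
	public static final String PARAM_JOB_NAME = "JOB_NAME";
	public static final String PARAM_FAILURE_STEP = "FAILURE_STEP";
	public static final String PARAM_MSG_SENT = "MSG_SENT";
	public static final String PARAM_RETRIES = "RETRIES";

	public void execute(JobExecutionContext context) throws JobExecutionException {
		JobDataMap jdm = context.getJobDetail().getJobDataMap();
		String harvestName = jdm.getString(PARAM_JOB_NAME);
		int failureStep = jdm.getInt(PARAM_FAILURE_STEP);
		boolean messageSent = jdm.getBoolean(PARAM_MSG_SENT);
		int retries = jdm.getInt(PARAM_RETRIES);
		// ... complete the harvest, re-scheduling with retries + 1 on failure
	}
}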
 
Example 5 - Project: kfs, File: SchedulerServiceImplTest.java
protected void scheduleJob(String groupName, String jobName, int startStep, int endStep, Date startTime, String requestorEmailAddress, Map<String,String> additionalJobData ) {
    Scheduler scheduler = (Scheduler) SpringContext.getService("scheduler");
    try {
        JobDetail jobDetail = scheduler.getJobDetail(jobName, groupName);
        if ( jobDetail == null ) {
            fail( "Unable to retrieve JobDetail object for " + groupName + " : " + jobName );
        }
        if ( jobDetail.getJobDataMap() == null ) {
            jobDetail.setJobDataMap( new JobDataMap() );
        }
        jobDetail.getJobDataMap().put(SchedulerService.JOB_STATUS_PARAMETER, SchedulerService.SCHEDULED_JOB_STATUS_CODE);
        scheduler.addJob(jobDetail, true);

        SimpleTriggerDescriptor trigger = new SimpleTriggerDescriptor(jobName+startTime, groupName, jobName, SpringContext.getBean(DateTimeService.class));
        trigger.setStartTime(startTime);
        Trigger qTrigger = trigger.getTrigger();
        qTrigger.getJobDataMap().put(JobListener.REQUESTOR_EMAIL_ADDRESS_KEY, requestorEmailAddress);
        qTrigger.getJobDataMap().put(Job.JOB_RUN_START_STEP, String.valueOf(startStep));
        qTrigger.getJobDataMap().put(Job.JOB_RUN_END_STEP, String.valueOf(endStep));
        if ( additionalJobData != null ) {
            qTrigger.getJobDataMap().putAll(additionalJobData);
        }
        scheduler.scheduleJob(qTrigger);
    }
    catch (SchedulerException e) {
        throw new RuntimeException("Caught exception while scheduling job: " + jobName, e);
    }
}
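This kfs example puts some entries on the job's own JobDataMap and others on the trigger's JobDataMap. Inside the executing quartz job both can be read through the merged view, where trigger-level entries override job-level ones on key collisions. A minimal sketch with illustrative keys (not the kfs constants):

// inside org.quartz.Job#execute(JobExecutionContext context)
JobDataMap merged = context.getMergedJobDataMap();
String jobStatus = merged.getString("jobStatus");           // a job-level entry
String requestorEmail = merged.getString("requestorEmail"); // a trigger-level entry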
 
Example 6 - Project: spacewalk, File: ScheduleManager.java
private void scheduleJob(Scheduler sched, String name,
        int mode, long interval, Class task, JobDataMap data)
    throws SchedulerException {

    Trigger t = createTrigger(name, updateIndexGroupName, mode, interval);
    JobDetail d = new JobDetail(name, updateIndexGroupName, task);
    d.setJobDataMap(data);
    sched.scheduleJob(d, t);
}
 
Example 7 - Project: Knowage-Server, File: ExportResource.java
/**
 * Schedule a job to clean up old exports.
 *
 * @throws SchedulerException in case of an error during scheduling
 */
private void scheduleCleanUp() throws SchedulerException {

	UserProfile userProfile = UserProfileManager.getProfile();
	String resoursePath = SpagoBIUtilities.getResourcePath();

	String jobName = String.format("delete-old-export-for-%s", userProfile.getUserId());
	String jobGroup = "delete-old-export";
	String jobDescription = String.format("Delete old exports for user %s", userProfile.getUserId());

	JobDataMap jobDataMap = new JobDataMap();

	jobDataMap.put(ExportDeleteOldJob.MAP_KEY_RESOURCE_PATH, resoursePath);
	jobDataMap.put(ExportDeleteOldJob.MAP_KEY_USER_PROFILE, userProfile);

	JobDetail job = new JobDetail(jobName, jobGroup, ExportDeleteOldJob.class);

	job.setDescription(jobDescription);
	job.setJobDataMap(jobDataMap);

	Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();

	scheduler.addJob(job, true);
	scheduler.triggerJob(job.getName(), job.getGroup());

}
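All of the examples on this page use the Quartz 1.x setter API (new JobDetail(...), setJobDataMap(...)), which was replaced by builders in Quartz 2.x. For reference, the body of scheduleCleanUp() would look roughly like this against the 2.x API; this is a sketch, not Knowage code:

// static import: org.quartz.JobBuilder.newJob; plus org.quartz.JobKey
JobDataMap jobDataMap = new JobDataMap();
jobDataMap.put(ExportDeleteOldJob.MAP_KEY_RESOURCE_PATH, resoursePath);
jobDataMap.put(ExportDeleteOldJob.MAP_KEY_USER_PROFILE, userProfile);

JobDetail job = newJob(ExportDeleteOldJob.class)
		.withIdentity(jobName, jobGroup)
		.withDescription(jobDescription)
		.usingJobData(jobDataMap)   // the 2.x replacement for setJobDataMap(...)
		.storeDurably()             // addJob(...) requires a durable job in 2.x
		.build();

Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
scheduler.addJob(job, true);
scheduler.triggerJob(JobKey.jobKey(jobName, jobGroup));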