org.quartz.JobExecutionContext#getMergedJobDataMap() Code Examples

The following are code examples of org.quartz.JobExecutionContext#getMergedJobDataMap() collected from open-source projects; each entry names the project and file so you can look up the full source on GitHub.
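Before the examples, it helps to see where the merged map comes from: getMergedJobDataMap() combines the JobDataMap stored on the JobDetail with the one stored on the Trigger, and trigger-level values override job-level values for the same key. A minimal, self-contained sketch (not taken from any of the projects below; the job and key names are made up):

import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;

public class MergedJobDataMapDemo {

    public static class DemoJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            JobDataMap data = context.getMergedJobDataMap();
            System.out.println(data.getString("myKey"));   // "from-trigger": trigger wins on collisions
            System.out.println(data.getString("jobOnly")); // "job-value": job-level entries remain visible
        }
    }

    public static void main(String[] args) throws SchedulerException {
        JobDetail job = JobBuilder.newJob(DemoJob.class)
                .withIdentity("demoJob", "demoGroup")
                .usingJobData("myKey", "from-job")        // job-level entry, overridden below
                .usingJobData("jobOnly", "job-value")     // job-level entry with no trigger counterpart
                .build();

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("demoTrigger", "demoGroup")
                .usingJobData("myKey", "from-trigger")    // trigger-level entry, overrides the job's value
                .startNow()
                .build();

        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.scheduleJob(job, trigger);
        scheduler.start();
    }
}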

Example 1  Project: AsuraFramework  File: SendMailJob.java
/**
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    JobDataMap data = context.getMergedJobDataMap();

    MailInfo mailInfo = populateMailInfo(data, createMailInfo());
    
    getLog().info("Sending message " + mailInfo);

    try {
        MimeMessage mimeMessage = prepareMimeMessage(mailInfo);
        
        Transport.send(mimeMessage);
    } catch (MessagingException e) {
        throw new JobExecutionException("Unable to send mail: " + mailInfo,
                e, false);
    }

}
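For context, a hedged sketch of how such a mail job might be scheduled, using the Quartz 2.x builder API for brevity. The keys below match the PROP_* constants of Quartz's stock org.quartz.jobs.ee.mail.SendMailJob ("smtp_host", "recipient", and so on); whether AsuraFramework's copy uses the same strings is an assumption, and the addresses are placeholders:

JobDetail mailJob = JobBuilder.newJob(SendMailJob.class)
        .withIdentity("nightlyMail")
        .usingJobData("smtp_host", "smtp.example.com")   // assumed SendMailJob.PROP_SMTP_HOST
        .usingJobData("sender", "noreply@example.com")   // assumed SendMailJob.PROP_SENDER
        .usingJobData("recipient", "ops@example.com")    // assumed SendMailJob.PROP_RECIPIENT
        .usingJobData("subject", "Nightly report")       // assumed SendMailJob.PROP_SUBJECT
        .usingJobData("message", "All jobs completed.")  // assumed SendMailJob.PROP_MESSAGE
        .build();

populateMailInfo(...) then reads these entries out of the merged map at execution time.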
 
Example 2  Project: spring-boot-quartz-demo  File: CronJob.java
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	JobKey key = jobExecutionContext.getJobDetail().getKey();
	System.out.println("Cron Job started with key :" + key.getName() + ", Group :"+key.getGroup() + " , Thread Name :"+Thread.currentThread().getName() + " ,Time now :"+new Date());
	
	System.out.println("======================================");
	System.out.println("Accessing annotation example: "+jobService.getAllJobs());
	List<Map<String, Object>> list = jobService.getAllJobs();
	System.out.println("Job list :"+list);
	System.out.println("======================================");
	
	//*********** For retrieving stored key-value pairs ***********/
	JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
	String myValue = dataMap.getString("myKey");
	System.out.println("Value:" + myValue);

	System.out.println("Thread: "+ Thread.currentThread().getName() +" stopped.");
}
 
Example 3  Project: AsuraFramework  File: NativeJob.java
public void execute(JobExecutionContext context)
    throws JobExecutionException {

    JobDataMap data = context.getMergedJobDataMap();
    
    String command = data.getString(PROP_COMMAND);

    String parameters = data.getString(PROP_PARAMETERS);

    if (parameters == null) {
        parameters = "";
    }

    boolean wait = true;
    if(data.containsKey(PROP_WAIT_FOR_PROCESS)) {
        wait = data.getBooleanValue(PROP_WAIT_FOR_PROCESS);
    }
    boolean consumeStreams = false;
    if(data.containsKey(PROP_CONSUME_STREAMS)) {
        consumeStreams = data.getBooleanValue(PROP_CONSUME_STREAMS);
    }
        
    Integer exitCode = this.runNativeCommand(command, parameters, wait, consumeStreams);
    context.setResult(exitCode);
    
}
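A hedged configuration sketch for this job, referencing the PROP_* constants read above (in stock Quartz these resolve to "command", "parameters", "waitForProcess", and "consumeStreams"; the command line itself is a placeholder):

JobDetail nativeJob = JobBuilder.newJob(NativeJob.class)
        .withIdentity("listTmp")
        .usingJobData(NativeJob.PROP_COMMAND, "/bin/ls")      // program to run (placeholder)
        .usingJobData(NativeJob.PROP_PARAMETERS, "-l /tmp")   // arguments passed to it
        .usingJobData(NativeJob.PROP_WAIT_FOR_PROCESS, true)  // block until the process exits
        .usingJobData(NativeJob.PROP_CONSUME_STREAMS, true)   // drain stdout/stderr so the process cannot block
        .build();

Because waitForProcess is true here, the process exit code ends up in context.setResult(exitCode) as shown above.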
 
Example 4  Project: chronix.server  File: RetentionJob.java
/**
 * Executes the job that calls the retention plugin.
 *
 * @param context the current job context
 * @throws JobExecutionException if the solr server could not be reached.
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    LOGGER.info("Starting retention job");
    JobDataMap data = context.getMergedJobDataMap();

    String url = data.getString(RetentionConstants.RETENTION_URL);
    HttpGet httpget = new HttpGet(url);

    // try-with-resources so the response is always closed
    try (CloseableHttpResponse response = httpClient.execute(httpget)) {
        LOGGER.info("Response was {}", response);
    } catch (IOException e) {
        throw new JobExecutionException("Could not execute http get request " + httpget, e);
    }

}
 
Example 5  Project: sakai  File: Assignment12ConversionJob.java
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    log.info("<===== Assignment Conversion Job start =====>");

    // never run as a recovery
    if (context.isRecovering()) {
        log.warn("<===== Assignment Conversion Job doesn't support recovery, job will terminate... =====>");
    } else {
        JobDataMap map = context.getMergedJobDataMap();
        Integer size = Integer.parseInt((String) map.get(SIZE_PROPERTY));
        Integer number = Integer.parseInt((String) map.get(NUMBER_PROPERTY));
        assignmentConversionService.runConversion(number, size);
    }

    log.info("<===== Assignment Conversion Job end =====>");
}
 
Example 6  Project: zkdoctor  File: ZKJob.java
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        int clusterId = dataMap.getInt(SchedulerConstant.CLUSTER_KEY);
        collectService.collectZKInfo(clusterId);
    } catch (Exception e) {
        LOGGER.error("ZKJob execute failed.", e);
    }
}
 
Example 7  Project: spring-boot-quartz-demo  File: SimpleJob.java
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
	JobKey key = jobExecutionContext.getJobDetail().getKey();
	System.out.println("Simple Job started with key :" + key.getName() + ", Group :"+key.getGroup() + " , Thread Name :"+Thread.currentThread().getName());
	
	System.out.println("======================================");
	System.out.println("Accessing annotation example: "+jobService.getAllJobs());
	List<Map<String, Object>> list = jobService.getAllJobs();
	System.out.println("Job list :"+list);
	System.out.println("======================================");
	
	//*********** For retrieving stored key-value pairs ***********/
	JobDataMap dataMap = jobExecutionContext.getMergedJobDataMap();
	String myValue = dataMap.getString("myKey");
	System.out.println("Value:" + myValue);

	//*********** For retrieving a stored object (deserialized from the stored bytes) ***********/
	/*
	SchedulerContext schedulerContext = null;
       try {
           schedulerContext = jobExecutionContext.getScheduler().getContext();
       } catch (SchedulerException e1) {
           e1.printStackTrace();
       }
       YourClass yourClassObject = (YourClass) schedulerContext.get("storedObjectKey");
	 */

	while(toStopFlag){
		try {
			System.out.println("Test Job Running... Thread Name :"+Thread.currentThread().getName());
			Thread.sleep(2000);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}
	System.out.println("Thread: "+ Thread.currentThread().getName() +" stopped.");
}
 
Example 8  Project: elasticsearch-imap  File: MailFlowJob.java
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {

    final JobKey key = context.getJobDetail().getKey();

    logger.debug("Executing mail flow job {}", key.toString());

    final JobDataMap data = context.getMergedJobDataMap();

    mailSource = (MailSource) data.get("mailSource");
    pattern = (Pattern) data.get("pattern");
    
    Client client = (Client) data.get("client");

    try {
        IMAPImporter.waitForYellowCluster(client);
        execute();
    } catch (final Exception e) {
        logger.error("Error in mail flow job {}: {} job", e, key.toString(), e.toString());
        final JobExecutionException e2 = new JobExecutionException(e);
        // this job will refire immediately
        // e2.refireImmediately();
        throw e2;
    }

    logger.debug("End of mail flow job with no errors {}", key.toString());

}
 
Example 9  Project: cachecloud  File: InspectorJob.java
@Override
public void action(JobExecutionContext context) {
    try {
        long start = System.currentTimeMillis();
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        // Application-related: choose the inspect handler by type
        InspectHandler inspectHandler;
        JobDataMap jobDataMap = context.getMergedJobDataMap();
        String inspectorType = MapUtils.getString(jobDataMap, "inspectorType");
        if (StringUtils.isBlank(inspectorType)) {
            logger.error("=====================InspectorJob:inspectorType is null=====================");
            return;
        } else if (inspectorType.equals("host")) {
            inspectHandler = applicationContext.getBean("hostInspectHandler", InspectHandler.class);
        } else if (inspectorType.equals("app")) {
            inspectHandler = applicationContext.getBean("appInspectHandler", InspectHandler.class);
        } else {
            logger.error("=====================InspectorJob:inspectorType not match:{}=====================", inspectorType);
            return;
        }
        inspectHandler.handle();
        long end = System.currentTimeMillis();
        logger.info("=====================InspectorJob {} Done! cost={} ms=====================",
                inspectHandler.getClass().getSimpleName(), (end - start));
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        throw new RuntimeException(e);
    }
}
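The APPLICATION_CONTEXT_KEY lookup above only works if the Spring ApplicationContext was published into the Quartz SchedulerContext at configuration time. A minimal sketch of that wiring with Spring's SchedulerFactoryBean; the key string is an assumption and must match whatever APPLICATION_CONTEXT_KEY resolves to in the project:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

@Configuration
public class QuartzConfig {

    @Bean
    public SchedulerFactoryBean schedulerFactoryBean() {
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        // Publishes the ApplicationContext into the SchedulerContext under this
        // key (assumed name), so jobs can fetch beans as in the example above.
        factory.setApplicationContextSchedulerContextKey("applicationContextKey");
        return factory;
    }
}

The MachineMonitorJob example that follows uses the same lookup pattern.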
 
Example 10  Project: cachecloud  File: MachineMonitorJob.java
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        String ip = dataMap.getString(ConstUtils.HOST_KEY);
        long hostId = dataMap.getLong(ConstUtils.HOST_ID_KEY);

        SchedulerContext schedulerContext = context.getScheduler().getContext();
        ApplicationContext applicationContext = (ApplicationContext) schedulerContext.get(APPLICATION_CONTEXT_KEY);
        MachineCenter machineCenter = applicationContext.getBean("machineCenter", MachineCenter.class);
        machineCenter.asyncMonitorMachineStats(hostId, ip);
    } catch (SchedulerException e) {
        logger.error(e.getMessage(), e);
    }
}
 
Example 11  Project: spring-boot-cookbook  File: EchoJob.java
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    LOGGER.info("================job start================");
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    echoBiz.echo(jobDataMap.getIntValue(RANDOM_VALUE));
    echoBiz.echo(jobDataMap.getString(CREATE_TIME));
    LOGGER.info("================job end================");
}
 
Example 12  Project: sakai  File: SynchronizationJob.java
public void executeInternal(JobExecutionContext jec) throws JobExecutionException {
    log.info("Starting Integration Job");

    JobDataMap jdm = jec.getMergedJobDataMap();

    if (dataProcessors != null) {
        for (DataProcessor dp : dataProcessors) {
            ProcessorState state = null;

            try {
                state = dp.init(jdm);
                dp.preProcess(state);
                dp.process(state);
                dp.postProcess(state);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            } finally {
                sendEmail(dp, state);
                if (state != null) {
                    state.reset();
                }
            }
        }
    } else {
        throw new JobExecutionException("Data processors list has not been set.");
    }

    log.info("Integration Job Complete");
}
 
Example 13  Project: EserKnife  File: ClusterStatJob.java
/**
 * Collects cluster statistics on a schedule.
 */
@Override
public void action(JobExecutionContext context) {
    try {
        JobDataMap dataMap = context.getMergedJobDataMap();
        Date date = context.getTrigger().getPreviousFireTime();
        String clusterName = dataMap.getString(Constant.CLUSTER_NAME);
        LOGGER.info("Starting stats collection for cluster " + clusterName);
        NodesStats nodesStats = new NodesStats.Builder().withJvm().withOs().withIndices()
                .withHttp().withTransport().withThreadPool().withFs().build();
        JestResult result = JestManager.getJestClient(clusterName).execute(nodesStats);
        JSONObject json = JSONObject.parseObject(result.getJsonString());
        if (json == null) {
            return;
        }
        asyncService.submitFuture(new CollectionCommonStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionIndicesStatHandler(JobKey.buildFutureKey(clusterName, Constant.INDICES, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionHttpStatHandler(JobKey.buildFutureKey(clusterName, Constant.HTTP, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionJVMStatHandler(JobKey.buildFutureKey(clusterName, Constant.JVM_NAME, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionTransportStatHandler(JobKey.buildFutureKey(clusterName, Constant.TRANSPORT, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionThreadPoolStatHandler(JobKey.buildFutureKey(clusterName, Constant.THREAD_POOL, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionOsStatHandler(JobKey.buildFutureKey(clusterName, Constant.OS, date), clusterName, json, date));
        asyncService.submitFuture(new CollectionFsStatHandler(JobKey.buildFutureKey(clusterName, Constant.FS, date), clusterName, json, date));

        // Alarm handling: keep only the enabled rules for this cluster
        List<AlarmRule> all = alarmRuleService.getList();
        List<AlarmRule> alarmFilterRule = new ArrayList<AlarmRule>();
        if (CollectionUtils.isEmpty(all)) {
            return;
        }
        for (AlarmRule alarmRule : all) {
            if (clusterName.equals(alarmRule.getClusterName()) && alarmRule.getEnable() > 0) {
                alarmFilterRule.add(alarmRule);
            }
        }
        asyncService.submitFuture(new ClusterNodeAlarm(JobKey.buildFutureKey(clusterName, Constant.NODE_ALARM, date), clusterName, json, alarmFilterRule));
    } catch (Exception e) {
        LOGGER.error("Scheduled cluster stats collection failed", e);
    }
}
 
Example 14  Project: quartz-web  File: MethodInvokeJob.java
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    Date startDate = new Date();
    LOG.debug("methodInvokeJob start : " + DateUtils.formart(startDate));
    long startTime = startDate.getTime();

    JobDataMap jobDataMap = context.getMergedJobDataMap();
    //Object methodInvokerObj = jobDataMap.get("methodInvoker");
    Object jobClassObj = jobDataMap.get("jobClass");
    Object constructorArgumentsObj = jobDataMap.get("constructorArguments");
    Object jobClassMethodNameObj = jobDataMap.get("jobClassMethodName");
    Object jobClassMethodArgsObj = jobDataMap.get("jobClassMethodArgs");
    try {
        String jobClass = (String) jobClassObj;
        Object[] constructorArguments = (Object[]) constructorArgumentsObj;
        String jobClassMethodName = (String) jobClassMethodNameObj;
        Object[] jobClassMethodArgs = (Object[]) jobClassMethodArgsObj;
        Object jobBean;

        LOG.debug("methodInvokeJob jobClass:" + jobClass);
        LOG.debug("methodInvokeJob jobClassMethodName:" + jobClassMethodName);

        QuartzBeanManagerFacade quartzBeanManagerFacade = QuartzBeanManagerFacade.getInstance();

        if (constructorArguments != null && constructorArguments.length > 0) {
            jobBean = quartzBeanManagerFacade.getBean(jobClass, constructorArguments);
        } else {
            jobBean = quartzBeanManagerFacade.getBean(jobClass);
        }

        MethodInvoker methodInvoker = new MethodInvoker();
        methodInvoker.setTargetMethod(jobClassMethodName);
        methodInvoker.setArguments(jobClassMethodArgs);

        methodInvoker.setTargetObject(jobBean);

        boolean prepared = methodInvoker.isPrepared();
        if (!prepared) {
            methodInvoker.prepare();
        }
        Object result = methodInvoker.invoke();
        context.setResult(result);
        Date endDate = new Date();
        long endTime = endDate.getTime();
        LOG.debug("methodInvokeJob end : " + DateUtils.formart(endDate) + "," + (endTime - startTime));

    } catch (Exception e) {
        LOG.error("MethodInvokeJob exception message:" + e.getMessage(), e);
        throw new JobExecutionException(e);
    }
}
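A hedged sketch of the JobDataMap this job expects, using exactly the keys read above; the target class and method names are hypothetical:

JobDataMap data = new JobDataMap();
data.put("jobClass", "com.example.ReportService");     // hypothetical target bean/class name
data.put("constructorArguments", new Object[0]);       // optional constructor arguments
data.put("jobClassMethodName", "generateDailyReport"); // hypothetical method to invoke
data.put("jobClassMethodArgs", new Object[0]);         // arguments for that method

JobDetail job = JobBuilder.newJob(MethodInvokeJob.class)
        .withIdentity("methodInvoke")
        .usingJobData(data)
        .build();

Note that storing Object[] values in a JobDataMap generally requires a RAM job store, or a JDBC store configured to serialize non-String values.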
 
Example 15  Project: deltaspike  File: DynamicExpressionObserverJob.java
@Override
public void execute(JobExecutionContext context) throws JobExecutionException
{
    JobDataMap jobDataMap = context.getMergedJobDataMap();
    String configExpression = jobDataMap.getString(CONFIG_EXPRESSION_KEY);
    String triggerId = jobDataMap.getString(TRIGGER_ID_KEY);
    String activeCronExpression = jobDataMap.getString(ACTIVE_CRON_EXPRESSION_KEY);

    String configKey = configExpression.substring(1, configExpression.length() - 1);
    String configuredValue = ConfigResolver.getPropertyAwarePropertyValue(configKey, activeCronExpression);

    if (!activeCronExpression.equals(configuredValue))
    {
        //both #put calls are needed currently
        context.getJobDetail().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);
        context.getTrigger().getJobDataMap().put(ACTIVE_CRON_EXPRESSION_KEY, configuredValue);

        BeanProvider.injectFields(this);

        JobKey observerJobKey = context.getJobDetail().getKey();
        String observedJobName = observerJobKey.getName()
            .substring(0, observerJobKey.getName().length() - OBSERVER_POSTFIX.length());
        JobKey observedJobKey = new JobKey(observedJobName, observerJobKey.getGroup());

        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity(triggerId)
                .forJob(observedJobName, observedJobKey.getGroup())
                .withSchedule(CronScheduleBuilder.cronSchedule(configuredValue))
                .build();

        //use rescheduleJob instead of delete + add
        //(unwrap is ok here, because this class will only get active in case of a quartz-scheduler)
        org.quartz.Scheduler quartzScheduler = scheduler.unwrap(org.quartz.Scheduler.class);
        try
        {
            quartzScheduler.rescheduleJob(trigger.getKey(), trigger);
        }
        catch (SchedulerException e)
        {
            LOG.warning("failed to updated cron-expression for " + observedJobKey);
        }
    }
}
 
Example 16  Project: AsuraFramework  File: FileScanJob.java
/** 
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap mergedJobDataMap = context.getMergedJobDataMap();
    SchedulerContext schedCtxt = null;
    try {
        schedCtxt = context.getScheduler().getContext();
    } catch (SchedulerException e) {
        throw new JobExecutionException("Error obtaining scheduler context.", e, false);
    }
    
    String fileName = mergedJobDataMap.getString(FILE_NAME);
    String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME);
    
    if(fileName == null) {
        throw new JobExecutionException("Required parameter '" + 
                FILE_NAME + "' not found in merged JobDataMap");
    }
    if(listenerName == null) {
        throw new JobExecutionException("Required parameter '" + 
                FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap");
    }

    FileScanListener listener = (FileScanListener)schedCtxt.get(listenerName);
    
    if(listener == null) {
        throw new JobExecutionException("FileScanListener named '" + 
                listenerName + "' not found in SchedulerContext");
    }
    
    long lastDate = -1;
    if(mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) {
        lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME);
    }
    
    long newDate = getLastModifiedDate(fileName);

    if(newDate < 0) {
        log.warn("File '"+fileName+"' does not exist.");
        return;
    }
    
    if(lastDate > 0 && (newDate != lastDate)) {
        // notify call back...
        log.info("File '"+fileName+"' updated, notifying listener.");
        listener.fileUpdated(fileName); 
    } else if (log.isDebugEnabled()) {
        log.debug("File '"+fileName+"' unchanged.");
    }
    
    // It is the JobDataMap on the JobDetail which is actually stateful
    context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate);
}
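The final put only survives to the next execution if Quartz re-persists the job's data map after each run. FileScanJob ships with Quartz, and in the 1.x line (which AsuraFramework appears to bundle) it does this by implementing StatefulJob; the Quartz 2.x equivalent, sketched here on a hypothetical job class, is annotation-based:

import org.quartz.*;

// Quartz 2.x sketch: re-persist JobDataMap changes after each execution.
@PersistJobDataAfterExecution
@DisallowConcurrentExecution // StatefulJob implied this serialization of runs too
public class MyFileScanJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // same logic as the FileScanJob above; writes to
        // context.getJobDetail().getJobDataMap() are now stored back after each run
    }
}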
 
Example 17  Project: sakai  File: NavigableEventLogListener.java
private void info (EVENTTYPE eventType, Trigger trig, JobExecutionContext context, JobExecutionException exception, CompletedExecutionInstruction instructionCode) {
    JobDetail
        detail = (context != null)?context.getJobDetail():null;
    final JobDataMap
        dataMap = (context != null)?context.getMergedJobDataMap():null;
    final String
        jobName = (detail != null)?detail.getKey().getName():null,
        jobDesc = (detail != null)?detail.getDescription():null;
    final Class
        jobClass = (detail != null)?detail.getJobClass():null;
    final Trigger
        trigger = (trig != null)?trig:((context != null)?context.getTrigger():null);
    final String
        trigName = (trigger != null)?trigger.getKey().getName():null,
        trigDesc = (trigger != null)?trigger.getDescription():null;
    final Date
        trigStart = (trigger != null)?trigger.getStartTime():null,
        trigEnd = (trigger != null)?trigger.getEndTime():null;

    StringBuilder
        sb = new StringBuilder();

    switch (eventType)
    {
        case JOB_EXECUTING:
        {
            sb.append("Job Executing: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            sb.append("]");
            break;
        }
        case JOB_VETOED:
        {
            sb.append("Job Vetoed: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            break;
        }
        case JOB_EXECUTED:
        {
            sb.append("Job Executed: [");
            sb.append("name: ").append(jobName).append(", description: ").append((jobDesc != null)?jobDesc:"")
              .append(", class: ").append(jobClass.getName());

            if (exception != null)
            {
                sb.append (", exception: ").append(exception.getMessage());
                if (exception.getCause() != null)
                {
                  sb.append(", exception cause: ").append(exception.getCause().getClass().getName());
                }
            }
            sb.append("]");

            break;
        }
        case TRIGGER_FIRED:
        {
            sb.append("Trigger Fired: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart != null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd != null)?trigEnd.toString():null);
            sb.append(", job: ").append(jobName).append(", job description: ").append((jobDesc != null)?jobDesc:"")
                    .append(", class: ").append(jobClass.getName());
            sb.append("]");
            break;
        }
        case TRIGGER_MISFIRED:
        {
            sb.append("Trigger Misfired: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart!=null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd!=null)?trigEnd.toString():null);
            sb.append("]");
            break;
        }
        case TRIGGER_COMPLETED:
        {
            sb.append("Trigger Completed: [");
            sb.append("trigger: ").append(trigName).append(", trigger description: ").append((trigDesc != null)?trigDesc:"")
              .append(", start: ").append((trigStart!=null)?trigStart.toString():null)
              .append(", end: ").append((trigEnd!=null)?trigEnd.toString():null);
            sb.append(", job: ").append(jobName).append(", job description: ").append((jobDesc != null)?jobDesc:"")
              .append(", class: ").append(jobClass.getName())
              .append(", execution result: ").append(instructionCode);
            sb.append("]");
            break;
        }
    }
    if (log.isDebugEnabled())
    {
    	log.debug(sb.toString());
    }
}
 