The following examples show how to use the org.quartz.JobExecutionException class from the Quartz API; you can also follow the links through to GitHub to view the full source code.
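Before the collected examples, here is a minimal sketch of the basic pattern they all share: a Quartz Job wraps any failure in a JobExecutionException so the scheduler can react. This sketch is illustrative only (the class name and the doWork helper are hypothetical; it assumes the Quartz 2.x API):

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class MinimalJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            doWork(context); // hypothetical helper standing in for real work
        } catch (Exception e) {
            JobExecutionException jee = new JobExecutionException("job failed", e);
            // Optionally ask Quartz to re-fire the job right away; several
            // examples below pass this flag as the third constructor argument.
            jee.setRefireImmediately(false);
            throw jee;
        }
    }

    private void doWork(JobExecutionContext context) {
        // real work would go here
    }
}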
@Override
protected void executeInternal(JobExecutionContext aJobContext) throws JobExecutionException {
    int triggerState = -2;
    try {
        triggerState = aJobContext.getScheduler().getTriggerState(null, "HeartBeatTriggerGroup");
        // Pause the heartbeat trigger group while the status notification runs, then resume it.
        aJobContext.getScheduler().pauseTriggerGroup("HeartBeatTriggerGroup");
        HarvestAgentStatusDTO status = harvestAgent.getStatus();
        notifier.heartbeat(status);
        aJobContext.getScheduler().resumeTriggerGroup("HeartBeatTriggerGroup");
    }
    catch (SchedulerException e) {
        e.printStackTrace();
        if (e.getCause() != null) {
            e.getCause().printStackTrace();
        }
        throw new JobExecutionException("Heartbeat failed controlling the scheduler. (triggerState is: " + triggerState + ")");
    }
}
@Override
protected void executeInternalSafe() throws JobExecutionException {
    Calendar time = CalendarUtils.getInstance();
    time.set(Calendar.SECOND, 0);
    time.set(Calendar.MILLISECOND, 0);
    Object[][] array = Application.createQueryNoFilter(
            "select createdBy,feedsId,content,contentMore from Feeds where scheduleTime = ? and type = ?")
            .setParameter(1, time.getTime())
            .setParameter(2, FeedsType.SCHEDULE.getMask())
            .array();
    if (array.length > 0) {
        doInternal(array);
    }
}
public void execute(JobExecutionContext context)
        throws JobExecutionException {
    JobDataMap data = context.getMergedJobDataMap();
    String command = data.getString(PROP_COMMAND);
    String parameters = data.getString(PROP_PARAMETERS);
    if (parameters == null) {
        parameters = "";
    }
    boolean wait = true;
    if (data.containsKey(PROP_WAIT_FOR_PROCESS)) {
        wait = data.getBooleanValue(PROP_WAIT_FOR_PROCESS);
    }
    boolean consumeStreams = false;
    if (data.containsKey(PROP_CONSUME_STREAMS)) {
        consumeStreams = data.getBooleanValue(PROP_CONSUME_STREAMS);
    }
    Integer exitCode = this.runNativeCommand(command, parameters, wait, consumeStreams);
    context.setResult(exitCode);
}
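The job above reads its command line from the merged JobDataMap, so everything it needs is supplied at scheduling time. As a rough usage sketch (assuming the Quartz 2.x fluent builders, an existing Scheduler instance, and that the PROP_* constants resolve to keys like "command" as in Quartz's bundled NativeJob; the job identity and script path are made up):

JobDetail job = JobBuilder.newJob(NativeJob.class)
        .withIdentity("nativeCommand", "examples")
        .usingJobData("command", "/usr/local/bin/nightly-backup.sh") // PROP_COMMAND
        .usingJobData("parameters", "--verbose")                     // PROP_PARAMETERS
        .usingJobData("waitForProcess", true)                        // PROP_WAIT_FOR_PROCESS
        .usingJobData("consumeStreams", true)                        // PROP_CONSUME_STREAMS
        .build();
scheduler.scheduleJob(job, TriggerBuilder.newTrigger().startNow().build());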
public void execute(JobExecutionContext context) throws JobExecutionException
{
    JobDataMap jobData = context.getJobDetail().getJobDataMap();
    // extract the node cleanup worker to use
    Object nodeCleanupWorkerObj = jobData.get("nodeCleanupWorker");
    if (nodeCleanupWorkerObj == null || !(nodeCleanupWorkerObj instanceof NodeCleanupWorker))
    {
        throw new AlfrescoRuntimeException(
                "NodeCleanupJob data must contain valid 'nodeCleanupWorker' reference");
    }
    NodeCleanupWorker nodeCleanupWorker = (NodeCleanupWorker) nodeCleanupWorkerObj;
    List<String> cleanupLog = nodeCleanupWorker.doClean();
    // Done
    if (logger.isDebugEnabled())
    {
        logger.debug("Node cleanup log:");
        for (String log : cleanupLog)
        {
            logger.debug(log);
        }
    }
}
@Override
public void execute(JobExecutionContext context)
        throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    try {
        List<HbaseResourceSensitivityAPIEntity> hbaseResourceSensitivity =
                load(jobDataMap, "HbaseResourceSensitivityService");
        if (hbaseResourceSensitivity == null) {
            LOG.warn("Hbase resource sensitivity information is empty");
            return;
        }
        Map<String, HbaseResourceSensitivityAPIEntity> map = Maps.uniqueIndex(
                hbaseResourceSensitivity,
                new Function<HbaseResourceSensitivityAPIEntity, String>() {
                    @Override
                    public String apply(HbaseResourceSensitivityAPIEntity input) {
                        return input.getTags().get("hbaseResource");
                    }
                });
        ExternalDataCache.getInstance().setJobResult(getClass(), map);
    } catch (Exception ex) {
        LOG.error("Fail to load hbase resource sensitivity data", ex);
    }
}
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    MessageDigest digest;
    try {
        digest = MessageDigest.getInstance(ALGORITHM);
    } catch (NoSuchAlgorithmException e) {
        // Preserve the original exception as the cause.
        throw new JobExecutionException("Can't get digest for " + ALGORITHM, e);
    }
    String[] types = {
            ResourceType.TYPE_HTML, ResourceType.MIME_TYPE_TEXT, ResourceType.TYPE_UPLOAD
    };
    IteratorChain allFiles = new IteratorChain();
    for (String type : types) {
        Iterator<ContentResource> resourceIterator = new ContentHostingIterator<ContentResource>(type);
        allFiles.addIterator(resourceIterator);
    }
    // Now check all the files.
    ContentResourceChecker checker = new ContentResourceChecker(allFiles, digest);
    checker.check();
}
@Override
public void beforeUpdate(
        final ProvisioningProfile<?, ?> profile,
        final SyncDelta delta,
        final EntityTO entityTO,
        final AnyUR anyUR) throws JobExecutionException {
    AttrPatch fullnamePatch = null;
    for (AttrPatch attrPatch : anyUR.getPlainAttrs()) {
        if ("fullname".equals(attrPatch.getAttr().getSchema())) {
            fullnamePatch = attrPatch;
        }
    }
    if (fullnamePatch == null) {
        fullnamePatch = new AttrPatch.Builder(new Attr.Builder("fullname").build()).
                operation(PatchOperation.ADD_REPLACE).
                build();
    }
    fullnamePatch.getAttr().getValues().clear();
    fullnamePatch.getAttr().getValues().add(String.valueOf(counter++));
}
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
    try {
        List<KeyLock> targets = new ArrayList<>();
        Integer count = 0;
        do {
            try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
                targets = emc.listLessThan(KeyLock.class, JpaObject.createTime_FIELDNAME,
                        DateUtils.addMinutes(new Date(), -2));
                if (!targets.isEmpty()) {
                    emc.beginTransaction(KeyLock.class);
                    for (KeyLock o : targets) {
                        emc.remove(o);
                        count++;
                    }
                    emc.commit();
                }
            }
        } while (!targets.isEmpty());
        logger.debug("Scheduled cleanup of key locks: {} removed.", count);
    } catch (Exception e) {
        throw new JobExecutionException(e);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    String scriptUrl = dataMap.getString(SCRIPT_URL_DATA_KEY);
    SiteContext siteContext = (SiteContext) dataMap.get(SITE_CONTEXT_DATA_KEY);
    ServletContext servletContext = (ServletContext) dataMap.get(SERVLET_CONTEXT_DATA_KEY);
    ScriptFactory scriptFactory = siteContext.getScriptFactory();
    if (scriptFactory == null) {
        throw new JobExecutionException(
                "No script factory associated to site context '" + siteContext.getSiteName() + "'");
    }
    SiteContext.setCurrent(siteContext);
    try {
        Map<String, Object> variables = new HashMap<>();
        GroovyScriptUtils.addJobScriptVariables(variables, servletContext);
        scriptFactory.getScript(scriptUrl).execute(variables);
    } catch (Exception e) {
        throw new JobExecutionException("Error executing script job at " + scriptUrl, e);
    } finally {
        SiteContext.clear();
    }
}
@Override
public void execute( JobExecutionContext context )
        throws JobExecutionException {
    String appName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.APP_NAME );
    String jobName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.JOB_NAME );
    String commandsFileName = (String) context.getJobDetail().getJobDataMap().get( RoboconfScheduler.CMD_NAME );
    try {
        Manager manager = (Manager) context.getScheduler().getContext().get( RoboconfScheduler.MANAGER );
        Application app = manager.applicationMngr().findApplicationByName( appName );
        // The web console finds jobs by names, not IDs, which remain internal to Quartz
        manager.commandsMngr().execute( app, commandsFileName, CommandHistoryItem.ORIGIN_SCHEDULER, jobName );
    } catch( Exception e ) {
        this.logger.warning( "An error occurred while executing job " + jobName + " (command file = " + commandsFileName + ")." );
        Utils.logException( this.logger, e );
    }
}
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
    try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
        Business business = new Business(emc);
        new TimerUnitStubs().execute(business);
        new TimerPersonStubs().execute(business);
        new TimerApplicationStubs().execute(business);
        new TimerSummary().execute(business);
        new TimerRunning().execute(business);
        new TimerOrganization().execute(business);
        new TimerCategory().execute(business);
    } catch (Exception e) {
        logger.error(e);
        throw new JobExecutionException(e);
    }
}
@Override
protected final void executeInternal(JobExecutionContext arg0) throws JobExecutionException {
    boolean success = false;
    try {
        // init logging framework
        ThreadLocalUserActivityLoggerInstaller.initEmptyUserActivityLogger();
        executeWithDB(arg0);
        DBFactory.getInstance(false).commitAndCloseSession();
        success = true;
    } catch (JobExecutionException e) {
        // for documentation purpose only
        throw e;
    } finally {
        // clean up logging
        ThreadLocalUserActivityLoggerInstaller.resetUserActivityLogger();
        if (!success) {
            DBFactory.getInstance(false).rollbackAndCloseSession();
        }
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    DataEnrichLCM lcm = (DataEnrichLCM) jobDataMap.getOrDefault("dataEnrichLCM", null);
    if (lcm == null) {
        throw new IllegalStateException("dataEnrichLCM implementation should be provided");
    }
    try {
        Collection externalEntities = lcm.loadExternal();
        Map<Object, Object> map = Maps.uniqueIndex(
                externalEntities,
                entity -> lcm.getCacheKey(entity)
        );
        ExternalDataCache.getInstance().setJobResult(lcm.getClass(), map);
    } catch (Exception ex) {
        LOG.error("Fail to load sensitivity data", ex);
    }
}
/**
 * Implements the Quartz job interface, which is called by the scheduler when a trigger associated with the job fires.
 * This quartz job publishes course sites a specified number of days before their term starts.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
    synchronized (this) {
        log.info("execute()");
        if (user == null) {
            log.error("The scheduled job to publish course sites cannot be run with an invalid user. No courses were published.");
        } else {
            try {
                // switch the current user to the one specified to run the quartz job
                Session sakaiSession = sessionManager.getCurrentSession();
                sakaiSession.setUserId(user.getId());
                int numSitesPublished = courseSitePublishService.publishCourseSites(numDaysBeforeTermStarts);
                log.info(numSitesPublished + " course sites were published.");
            } catch (Exception ex) {
                log.error(ex.getMessage());
            }
        }
    }
}
/**
 * {@inheritDoc}
 */
public void execute(JobExecutionContext arg0In)
        throws JobExecutionException {
    List<Map<String, Long>> failedRebootActions = lookupRebootActionCleanup();
    for (Map<String, Long> fa : failedRebootActions) {
        Long sid = fa.get("server_id");
        Long aid = fa.get("action_id");
        List<Long> fAids = invalidateActionRecursive(sid, aid);
        for (Long fAid : fAids) {
            invalidateKickstartSession(sid, fAid);
        }
    }
    if (failedRebootActions.size() > 0) {
        log.info("Set " + failedRebootActions.size() +
                " reboot action(s) to failed. Running longer than 6 hours.");
    }
}
@Override
public void jobWasExecuted(JobExecutionContext context, JobExecutionException exception) {
    JobExecution execution = CurrentJobExecution.get();
    if (exception != null) {
        execution.error();
        JobDetail jobDetail = context.getJobDetail();
        JobLogs.error("Exception occurred while executing job "
                + Jobs.jobCass(jobDetail).getName(), exception);
    }
    executions.jobEnds(execution, context);
    JobLogs.setDefaultLevel();
    CurrentJobExecution.unset();
    CurrentJobExecutionContext.unset();
}
@Test
public void testInstanceSyncCMRunning() throws JobExecutionException {
    setupForCM();
    when(clusterStatusResult.getClusterStatus()).thenReturn(ClusterStatus.CLUSTERMANAGER_RUNNING);
    when(clusterApiConnectors.getConnector(stack)).thenReturn(clusterApi);
    when(clusterApi.clusterStatusService()).thenReturn(clusterStatusService);
    underTest.executeInternal(jobExecutionContext);
    verify(clusterOperationService, times(1)).reportHealthChange(any(), anySet(), anySet());
    verify(stackInstanceStatusChecker).queryInstanceStatuses(eq(stack), any());
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    log.info(String.join(":", new Date().toString(), "redis stock sync"));
    stringRedisTemplate.opsForHash().keys(RedisConstant.PRODUCT_STOCKS).parallelStream().forEach(key -> {
        List<String> skusPrefix = splitter.splitToList(key.toString());
        String s = skusPrefix.get(1);
        log.info("skuId:" + s);
        String redisStock = (String) stringRedisTemplate.opsForHash().get(RedisConstant.PRODUCT_STOCKS, key);
        PurchaseProductSku productSku = productSkuMapper.selectByPrimaryKey(Integer.parseInt(s));
        // roll the delta between DB stock and Redis stock into sales, then sync the stock value
        productSku.setSales(productSku.getSales() + (productSku.getStock() - Integer.parseInt(redisStock)));
        productSku.setStock(Integer.parseInt(redisStock));
        productSkuMapper.updateByPrimaryKey(productSku);
    });
}
public void execute(JobExecutionContext context) throws JobExecutionException {
    DetectionJobDetail jobDetail = (DetectionJobDetail) context.getJobDetail();
    Session session = jobDetail.getSessionFactory().openSession();
    try {
        String currentInstanceName = jobDetail.getCurrentInstanceName();
        Operation operation = detection(session, jobDetail.getJobInstanceNames(), currentInstanceName);
        if (operation.equals(Operation.reset)) {
            SchedulerService service = jobDetail.getSchedulerService();
            service.resetScheduer();
            Heartbeat beat = new Heartbeat();
            Calendar c = Calendar.getInstance();
            c.setTime(new Date());
            c.add(Calendar.SECOND, 1);
            beat.setDate(c.getTime());
            beat.setId(UUID.randomUUID().toString());
            beat.setInstanceName(currentInstanceName);
            session.save(beat);
            initHeartJob(currentInstanceName, service.getScheduler());
        }
    } catch (Exception e) {
        throw new JobExecutionException(e);
    } finally {
        session.flush();
        session.close();
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    Long pollableTaskId = context.getMergedJobDataMap().getLong(POLLABLE_TASK_ID);
    currentPollableTask = pollableTaskService.getPollableTask(pollableTaskId);
    ExceptionHolder exceptionHolder = new ExceptionHolder(currentPollableTask);
    try {
        I callInput;
        String inputStringFromJob = context.getMergedJobDataMap().getString(INPUT);
        if (inputStringFromJob != null) {
            logger.debug("Inlined data, read from job data");
            callInput = (I) objectMapper.readValueUnchecked(inputStringFromJob, typeTokenInput.getRawType());
        } else {
            logger.debug("No inlined data, read from blob storage");
            callInput = (I) pollableTaskBlobStorage.getInput(pollableTaskId, typeTokenInput.getRawType());
        }
        O callOutput = call(callInput);
        if (!typeTokenOutput.getRawType().equals(Void.class)) {
            pollableTaskBlobStorage.saveOutput(pollableTaskId, callOutput);
        }
    } catch (Throwable t) {
        pollableTaskExceptionUtils.processException(t, exceptionHolder);
    } finally {
        currentPollableTask = pollableTaskService.finishTask(
                currentPollableTask.getId(),
                null,
                exceptionHolder,
                null);
    }
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    // get the reference to the Circle+ from the job data
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    CirclePlus circlePlus = (CirclePlus) dataMap.get(CIRCLE_PLUS_JOB_DATA_KEY);
    circlePlus.setClock();
}
public void execute(JobExecutionContext arg0) throws JobExecutionException {
    try {
        runner(arg0);
    } catch (Exception e) {
        e.printStackTrace();
        log.debug("Exception (" + this.getClass() + ") ....................................> " + e.toString());
        throw new JobExecutionException(e);
    }
}
/**
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext aJobContext) throws JobExecutionException {
    if (aJobContext.getJobDetail().getJobDataMap() != null) {
        JobDataMap jdm = aJobContext.getJobDetail().getJobDataMap();
        String jobName = (String) jdm.get(PARAM_JOB_NAME);
        Integer failureStep = (Integer) jdm.get(PARAM_FAILURE_STEP);
        Boolean msgSent = (Boolean) jdm.get(PARAM_MSG_SENT);
        Integer retries = (Integer) jdm.get(PARAM_RETRIES);
        if (log.isInfoEnabled()) {
            log.info("Processing job completion for " + jobName);
        }
        ApplicationContext context = ApplicationContextFactory.getWebApplicationContext();
        HarvestAgent ha = (HarvestAgent) context.getBean(Constants.BEAN_HARVEST_AGENT);
        int failedOn = ha.completeHarvest(jobName, failureStep);
        if (failedOn != HarvestAgent.NO_FAILURES) {
            if (!msgSent.booleanValue()) {
                // Send the failure notification.
                HarvestCoordinatorNotifier harvestCoordinatorNotifier = (HarvestCoordinatorNotifier) context.getBean(Constants.BEAN_NOTIFIER);
                harvestCoordinatorNotifier.notification(new Long(jobName), MessageType.CATEGORY_MISC, MessageType.TARGET_INSTANCE_PROCESSING_ERROR);
                msgSent = new Boolean(true);
            }
            try {
                SchedulerUtil.scheduleHarvestCompleteJob(jobName, failedOn, msgSent, retries.intValue() + 1);
            }
            catch (Exception e) {
                throw new HarvestAgentException("Failed to start harvest complete job : " + e.getMessage(), e);
            }
        }
    }
}
@Override
public void executeInContext(final JobExecutionContext jobCtx, final InterruptionStatus interruption)
        throws JobExecutionException {
    final long snifferId = SnifferJobManager.getSnifferId(jobCtx.getJobDetail().getKey());
    final long logSourceId = SnifferJobManager.getLogSourceId(jobCtx.getJobDetail().getKey());
    final ScheduleInfo scheduleInfo = scheduleInfoAccess.getScheduleInfo(snifferId);
    scheduleInfo.setLastFireTime(new Date());
    scheduleInfoAccess.updateScheduleInfo(snifferId, scheduleInfo);
    logger.debug("Start sniffing job processing for sniffer with id {} and log source {}", snifferId, logSourceId);
    final Sniffer sniffer = snifferPersistence.getSniffer(snifferId);
    if (sniffer == null) {
        logger.error("Sniffer not found for id {}, stopping cron job for log source {}", snifferId, logSourceId);
        deleteJob(jobCtx.getScheduler(), snifferId);
        return;
    }
    final LogSource<LogRawAccess<? extends LogInputStream>> logSource = logSourceProvider
            .getSourceById(logSourceId);
    if (logSource == null) {
        logger.error("Log source not found for id {}, stopping cron job for sniffer {}", logSourceId, snifferId);
        deleteJob(jobCtx.getScheduler(), snifferId);
        return;
    }
    try {
        sniff(sniffer, logSource, interruption);
    } catch (final Exception e) {
        logger.error("Failed sniffing in context of sniffer={} and log source={}", sniffer, logSource);
        // the final boolean is refireImmediately: do not re-run the job right away
        throw new JobExecutionException("Failed sniffing", e, false);
    } finally {
        logger.debug("Stopped sniffing job processing for sniffer with id {} and log source {}", snifferId,
                logSourceId);
    }
}
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
    try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
        Business business = new Business(emc);
        alarm(business);
        logger.info("The trigger for calendar alarm execute completed." + new Date());
    } catch (Exception e) {
        logger.error(e);
        throw new JobExecutionException(e);
    }
}
@Override
protected void executeInternal(JobExecutionContext arg0) throws JobExecutionException {
    // look for queued jobs and execute them
    try {
        logger.debug("JobQueueQuartzJob was called");
        jobQueueService.checkAndRunJobs();
    } catch (Exception e) {
        logger.error("JobQueueQuartzJob call returned an error: " + e.getMessage(), e);
        throw new JobExecutionException("JobQueueQuartzJob call returned an error", e, false);
    }
}
@Test
@DisplayName(
        "GIVEN an available stack " +
        "WHEN all instances go down " +
        "THEN stack is no more available"
)
void availableStackAllInstancesGoesDown() throws JobExecutionException {
    setUpClusterStatus(ClusterStatus.STARTED);
    stack.setStackStatus(new StackStatus(stack, DetailedStackStatus.AVAILABLE));
    setUpClusterManagerStatus(INSTANCE_1, ClusterManagerStatus.HEALTHY);
    setUpClusterManagerStatus(INSTANCE_2, ClusterManagerStatus.UNHEALTHY);
    setUpCloudVmInstanceStatuses(Map.of(
            INSTANCE_1, com.sequenceiq.cloudbreak.cloud.model.InstanceStatus.TERMINATED_BY_PROVIDER,
            INSTANCE_2, com.sequenceiq.cloudbreak.cloud.model.InstanceStatus.TERMINATED_BY_PROVIDER));
    when(instanceMetaDataService.findNotTerminatedForStackWithoutInstanceGroups(STACK_ID)).thenReturn(Set.of());
    underTest.executeInternal(jobExecutionContext);
    verify(instanceMetaDataService, never()).findHostInStack(eq(STACK_ID), any());
    verify(hostGroupService, never()).getRepairViewByClusterIdAndName(anyLong(), anyString());
    verify(flowManager, never()).triggerClusterRepairFlow(anyLong(), any(), anyBoolean());
    verify(instanceMetaDataService, never()).saveAll(any());
    verify(clusterService, never()).updateClusterStatusByStackId(any(), any(), any());
    assertInstancesSavedWithStatuses(Map.of(
            INSTANCE_1, InstanceStatus.DELETED_BY_PROVIDER,
            INSTANCE_2, InstanceStatus.DELETED_BY_PROVIDER));
    verify(stackUpdater).updateStackStatus(eq(STACK_ID), eq(DetailedStackStatus.DELETED_ON_PROVIDER_SIDE), any());
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    SimpleDateFormat f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    String taskName = context.getJobDetail().getKey().getName();
    log.info("{},{} timer execute {} executing...", this.toString(), taskName, f.format(new Date()));
    log.info("{},{} Task execute {} executing...", this.toString(), taskName, f.format(new Date()));
    dealTimerTask(context, taskName);
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    String beanId = jobExecutionContext.getJobDetail().getJobDataMap().getString(SPRING_BEAN_NAME);
    Job job = (Job) applicationContext.getBean(beanId);
    if (job instanceof StatefulJob) {
        log.warn("Non-stateful wrapper used with stateful job: {}, use SpringStatefulJobBeanWrapper", beanId);
    }
    job.execute(jobExecutionContext);
}