The following are example usages of org.quartz.JobExecutionContext collected from open-source projects (one example also deals with org.quartz.UnableToInterruptJobException).
/**
* @see org.springframework.scheduling.quartz.QuartzJobBean#executeInternal(org.quartz.JobExecutionContext)
*/
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
IMonitoringFullService monitoringService = getMonitoringService(context);
// read the gate id passed in through the job data map
Map properties = context.getJobDetail().getJobDataMap();
Long gateId = (Long) properties.get("gateId");
if (log.isDebugEnabled()) {
log.debug("Closing gate......[" + gateId.longValue() + "]");
}
monitoringService.closeGate(gateId);
if (log.isDebugEnabled()) {
log.debug("Gate......[" + gateId.longValue() + "] Closed");
}
}
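
For the gate id above to be readable in executeInternal(), it has to be placed into the JobDataMap when the job is scheduled. Below is a minimal sketch of that scheduling side using the Quartz 2.x builder API; the class name CloseGateJob, the group names, and the trigger identity are assumptions for illustration only, and the "gateId" key simply mirrors the example above.

    import static org.quartz.JobBuilder.newJob;
    import static org.quartz.TriggerBuilder.newTrigger;

    import java.util.Date;
    import org.quartz.JobDetail;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.Trigger;

    public class GateClosingScheduler {
        // Schedules a hypothetical CloseGateJob (standing in for the job class above)
        // to fire once at the given time, carrying the gate id in the JobDataMap.
        public void scheduleGateClose(Scheduler scheduler, Long gateId, Date closeAt) throws SchedulerException {
            JobDetail job = newJob(CloseGateJob.class)
                    .withIdentity("closeGate:" + gateId, "gates")
                    .usingJobData("gateId", gateId)   // read back via getJobDataMap().get("gateId")
                    .build();
            Trigger trigger = newTrigger()
                    .withIdentity("closeGateTrigger:" + gateId, "gates")
                    .startAt(closeAt)                 // one-shot trigger at the gate's closing time
                    .build();
            scheduler.scheduleJob(job, trigger);
        }
    }
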
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
RDBAnalyze rdbAnalyze = (RDBAnalyze) context.getJobDetail().getJobDataMap().get("rdbAnalyzeJob");
int[] strings = null;
if (rdbAnalyze.getAnalyzer().contains(",")) {
String[] str = rdbAnalyze.getAnalyzer().split(",");
strings = new int[str.length];
for (int i = 0; i < str.length; i++) {
strings[i] = Integer.parseInt(str[i]);
}
} else {
strings = new int[1];
strings[0] = Integer.parseInt(rdbAnalyze.getAnalyzer());
}
JSONObject status = rdbAnalyzeService.allocationRDBAnalyzeJob(rdbAnalyze.getId(), strings);
LOG.info("cron :{}", rdbAnalyze.getSchedule());
if ((boolean) status.get("status")) {
LOG.info("cron success,message:{}", status.get("message"));
} else {
LOG.warn("cron faild!,message:{}", status.get("message"));
}
}
public void notifyTriggerListenersComplete(JobExecutionContext jec,
int instCode) throws SchedulerException {
// build a list of all trigger listeners that are to be notified...
List triggerListeners = buildTriggerListenerList(jec.getTrigger()
.getTriggerListenerNames());
// notify all trigger listeners in the list
java.util.Iterator itr = triggerListeners.iterator();
while (itr.hasNext()) {
TriggerListener tl = (TriggerListener) itr.next();
try {
tl.triggerComplete(jec.getTrigger(), jec, instCode);
} catch (Exception e) {
SchedulerException se = new SchedulerException(
"TriggerListener '" + tl.getName()
+ "' threw exception: " + e.getMessage(), e);
se.setErrorCode(SchedulerException.ERR_TRIGGER_LISTENER);
throw se;
}
}
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
try {
if (pirmaryCenter()) {
try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
if (!ThisApplication.zhengwuDingdingSyncOrganizationCallbackRequest.isEmpty()) {
ThisApplication.zhengwuDingdingSyncOrganizationCallbackRequest.clear();
Business business = new Business(emc);
SyncOrganization o = new SyncOrganization();
o.execute(business);
}
}
}
} catch (Exception e) {
logger.error(e);
throw new JobExecutionException(e);
}
}
@Override
public void execute(JobExecutionContext arg0In) throws JobExecutionException {
if (log.isDebugEnabled()) {
log.debug("start notifications cleanup");
}
// Measure time and calculate the total duration
long start = System.currentTimeMillis();
int lifetime = ConfigDefaults.get().getNotificationsLifetime();
Date before = Date.from(LocalDate.now().atStartOfDay().minusDays(lifetime)
.atZone(ZoneId.systemDefault()).toInstant());
log.info("Deleting all notification messages created before: " + before);
int deleted = UserNotificationFactory.deleteNotificationMessagesBefore(before);
log.info("Notification messages deleted: " + deleted);
if (log.isDebugEnabled()) {
long duration = System.currentTimeMillis() - start;
log.debug("Total duration was: " + duration + " ms");
}
}
@Override
public void schedule(JobExecutionContext jobExecutionContext) throws Exception {
try {
List<KeyLock> targets = new ArrayList<>();
Integer count = 0;
do {
try (EntityManagerContainer emc = EntityManagerContainerFactory.instance().create()) {
targets = emc.listLessThan(KeyLock.class, JpaObject.createTime_FIELDNAME,
DateUtils.addMinutes(new Date(), -2));
if (!targets.isEmpty()) {
emc.beginTransaction(KeyLock.class);
for (KeyLock o : targets) {
emc.remove(o);
count++;
}
emc.commit();
}
}
} while (!targets.isEmpty());
logger.debug("定时清理值锁定:{}条.", count);
} catch (Exception e) {
throw new JobExecutionException(e);
}
}
@Override
public void execute(JobExecutionContext arg0) {
String db_server, db_name, db_user, db_pass, save_path;
int db_port;
db_server = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.server");
db_port = Integer.parseInt(Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.port"));
db_name = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.name");
db_user = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.user");
db_pass = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.pass");
save_path = Utils.getSdaProperty("com.pineone.icbms.sda.triple.save_path");
// create the save directory if it does not exist (the path includes year/month/day)
save_path = Utils.makeSavePath(save_path);
try {
collect(db_server, db_port, db_name, db_user, db_pass, save_path, arg0);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* {@inheritDoc}
*/
public void execute(JobExecutionContext ctx) throws JobExecutionException {
List<String> cmd = new ArrayList<String>();
String list = (String) ctx.getJobDetail().getJobDataMap().get("list");
String channel = (String) ctx.getJobDetail().getJobDataMap().get("channel");
cmd.add("/usr/bin/satellite-sync");
if (list != null) {
cmd.add("--list-channels");
}
else if (channel != null) {
cmd.add("-c");
cmd.add(channel);
}
String[] args = cmd.toArray(new String[cmd.size()]);
executeExtCmd(args);
}
/**
* {@inheritDoc}
*/
@Override
public void execute(JobExecutionContext jobContext) throws JobExecutionException {
JobDataMap jobDataMap = jobContext.getJobDetail().getJobDataMap();
String locationId = jobDataMap.getString("locationId");
logger.debug("Starting Weather job for location '{}'", locationId);
try {
LocationConfig locationConfig = context.getConfig().getLocationConfig(locationId);
WeatherProvider weatherProvider = WeatherProviderFactory
.createWeatherProvider(locationConfig.getProviderName());
context.setWeather(locationId, weatherProvider.getWeather(locationConfig));
weatherPublisher.publish(locationId);
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
throw new JobExecutionException(ex.getMessage(), ex);
}
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
if (siteId == null) {
siteId = UUID.randomUUID().toString();
}
log.info("Attempting to import: " + zip+ " into "+ siteId);
Session currentSession = sessionManager.getCurrentSession();
String oldId = currentSession.getUserId();
String oldEid = currentSession.getUserEid();
try {
currentSession.setUserId("admin");
currentSession.setUserEid("admin");
archiveService.mergeFromZip(zip, siteId, null);
} catch (Exception e) {
log.warn("Failed to import " + zip + " to " + siteId + " " + e.getMessage());
} finally {
currentSession.setUserId(oldId);
currentSession.setUserEid(oldEid);
}
}
@SuppressWarnings("unchecked")
private static void startCEPRule(JobExecutionContext context, String jobName, String type) {
try {
CEPRule rule = JsonHelper.json2Object(context.getJobDetail().getJobDataMap().get("rule").toString(), CEPRule.class);
// ruleMap
Pair<CEPRule, CEPRule> ruleBak = null;
// only read the backup rule when the job data map actually contains one
if (!StringUtils.isEmpty(context.getJobDetail().getJobDataMap().get("ruleBak"))) {
ruleBak = (Pair<CEPRule, CEPRule>) context.getJobDetail().getJobDataMap().get("ruleBak");
}
log.info("{}, {}, {}", rule, jobName, type);
// update the rule cache when the rule is running, not yet started, or deleted
if (RuleStatusEnum.RUNNING.getCode().equals(rule.getStatus()) || RuleStatusEnum.NOT_STARTED.getCode().equals(rule.getStatus()) || RuleStatusEnum.IS_DELETED.getCode().equals(rule.getStatus())) {
CEPRuleCache.updateCEPRule(rule, ruleBak);
}
} catch (BrokerException | SchedulerException e) {
log.error("BrokerException:{}", e.toString());
}
}
@Override
public void execute(JobExecutionContext arg0) {
String db_server, db_name, db_user, db_pass, save_path;
int db_port;
db_server = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.server");
db_port = Integer.parseInt(Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.port"));
db_name = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.name");
db_user = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.user");
db_pass = Utils.getSdaProperty("com.pineone.icbms.sda.ss.db.pass");
save_path = Utils.getSdaProperty("com.pineone.icbms.sda.triple.save_path");
// create the save directory if it does not exist (the path includes year/month/day)
save_path = Utils.makeSavePath(save_path);
try {
collect(db_server, db_port, db_name, db_user, db_pass, save_path, arg0);
} catch (Exception e) {
e.printStackTrace();
}
}
public void notifyTriggerListenersComplete(JobExecutionContext jec,
CompletedExecutionInstruction instCode) throws SchedulerException {
// build a list of all trigger listeners that are to be notified...
List<TriggerListener> triggerListeners = buildTriggerListenerList();
// notify all trigger listeners in the list
for(TriggerListener tl: triggerListeners) {
try {
if(!matchTriggerListener(tl, jec.getTrigger().getKey()))
continue;
tl.triggerComplete(jec.getTrigger(), jec, instCode);
} catch (Exception e) {
SchedulerException se = new SchedulerException(
"TriggerListener '" + tl.getName()
+ "' threw exception: " + e.getMessage(), e);
throw se;
}
}
}
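
For triggerComplete() (and the other listener callbacks driven by this notification code) to ever be invoked, a TriggerListener has to be registered with the scheduler. A minimal sketch, assuming Quartz 2.x and a hypothetical listener name; the extension base class TriggerListenerSupport provides no-op defaults for the remaining callbacks.

    import org.quartz.JobExecutionContext;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.Trigger;
    import org.quartz.Trigger.CompletedExecutionInstruction;
    import org.quartz.listeners.TriggerListenerSupport;

    public class LoggingTriggerListener extends TriggerListenerSupport {

        @Override
        public String getName() {
            return "loggingTriggerListener";   // hypothetical listener name
        }

        @Override
        public void triggerComplete(Trigger trigger, JobExecutionContext context,
                CompletedExecutionInstruction triggerInstructionCode) {
            // called by notifyTriggerListenersComplete() after the job has run
            System.out.println("Trigger " + trigger.getKey() + " completed with " + triggerInstructionCode);
        }

        // Register the listener for all triggers (no matcher means "match everything").
        public static void register(Scheduler scheduler) throws SchedulerException {
            scheduler.getListenerManager().addTriggerListener(new LoggingTriggerListener());
        }
    }
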
/**
* Calls the cleaner to do its work
*/
public void execute(JobExecutionContext context) throws JobExecutionException
{
JobDataMap jobData = context.getJobDetail().getJobDataMap();
// extract the content cleaner to use
Object sharedFolderPatchObj = jobData.get("sharedFolderPatch");
if (sharedFolderPatchObj == null || !(sharedFolderPatchObj instanceof SharedFolderPatch))
{
throw new AlfrescoRuntimeException(
"'sharedFolderPatch' data must contain valid 'SharedFolderPatch' reference");
}
// Job Lock Here - should probably move into the patch service at some time.
SharedFolderPatch sharedFolderPatch = (SharedFolderPatch) sharedFolderPatchObj;
sharedFolderPatch.executeAsync();
}
@Override
public void interruptJob(String jobName) {
List<JobExecutionContext> runningJobs = getRunningJobs();
for (JobExecutionContext jobCtx : runningJobs) {
if (jobName.equals(jobCtx.getJobDetail().getName())) {
// found the running job with the requested name, ask it to interrupt itself
try {
((Job) jobCtx.getJobInstance()).interrupt();
}
catch (UnableToInterruptJobException ex) {
LOG.warn("Unable to perform job interrupt", ex);
}
break;
}
}
}
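
The interruptJob() method above simply forwards the call to the running job instance, which only works if the job class implements org.quartz.InterruptableJob. Below is a minimal sketch of such a job; the flag-polling loop and the two helper methods are illustrative placeholders.

    import org.quartz.InterruptableJob;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;
    import org.quartz.UnableToInterruptJobException;

    public class InterruptibleWorkerJob implements InterruptableJob {

        private volatile boolean interrupted = false;

        @Override
        public void execute(JobExecutionContext context) throws JobExecutionException {
            // do the work in small chunks and check the flag between chunks
            while (!interrupted && hasMoreWork()) {
                doNextChunk();
            }
        }

        @Override
        public void interrupt() throws UnableToInterruptJobException {
            // called from another thread, e.g. by the interruptJob() method above
            interrupted = true;
        }

        private boolean hasMoreWork() { return false; }   // placeholder for real work tracking
        private void doNextChunk() { }                     // placeholder for one unit of work
    }
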
/**
* This implementation applies the passed-in job data map as bean property
* values, and delegates to {@code executeInternal} afterwards.
* @see #executeInternal
*/
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
try {
BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(this);
MutablePropertyValues pvs = new MutablePropertyValues();
pvs.addPropertyValues(context.getScheduler().getContext());
pvs.addPropertyValues(context.getMergedJobDataMap());
bw.setPropertyValues(pvs, true);
}
catch (SchedulerException ex) {
throw new JobExecutionException(ex);
}
executeInternal(context);
}
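
Because this execute() copies the scheduler context and the merged JobDataMap into bean properties before delegating, a subclass only needs matching setters to receive its configuration. A minimal sketch of such a subclass; the class name ReportJob, the reportName property, and the body of executeInternal() are illustrative.

    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;
    import org.springframework.scheduling.quartz.QuartzJobBean;

    public class ReportJob extends QuartzJobBean {

        private String reportName;   // populated from a "reportName" entry in the merged JobDataMap

        public void setReportName(String reportName) {
            this.reportName = reportName;
        }

        @Override
        protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
            // by the time this runs, reportName has been injected by the execute() shown above
            System.out.println("Generating report: " + reportName);
        }
    }
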
/**
* {@inheritDoc}
*/
@Override
public boolean vetoJobExecution(Trigger trigger, JobExecutionContext ctx) {
boolean retval = false;
for (Iterator iter = this.listenerChain.iterator(); iter.hasNext();) {
TriggerListener listener = (TriggerListener) iter.next();
boolean tmp = listener.vetoJobExecution(trigger, ctx);
if (!retval && tmp) {
retval = true;
}
}
return retval;
}
public static DetectionPipelineTaskInfo buildTaskInfo(JobExecutionContext jobExecutionContext) {
JobKey jobKey = jobExecutionContext.getJobDetail().getKey();
Long id = getIdFromJobKey(jobKey.getName());
DetectionConfigDTO configDTO = DAORegistry.getInstance().getDetectionConfigManager().findById(id);
return buildTaskInfoFromDetectionConfig(configDTO, System.currentTimeMillis());
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
try {
if (pirmaryCenter()) {
CenterQueueRefreshBody body = new CenterQueueRefreshBody();
ThisApplication.centerQueue.send(body);
}
} catch (Exception e) {
logger.error(e);
throw new JobExecutionException(e);
}
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
LOGGER.info("================job start================");
JobDataMap jobDataMap = context.getMergedJobDataMap();
echoBiz.echo(jobDataMap.getIntValue(RANDOM_VALUE));
echoBiz.echo(jobDataMap.getString(CREATE_TIME));
LOGGER.info("================job end================");
}
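
getMergedJobDataMap() combines the job-level map with any trigger-level entries (trigger values override job values on key collisions), so RANDOM_VALUE and CREATE_TIME above may come from either level. A minimal sketch of setting both levels; the key names, group names, and the EchoJob placeholder class are assumptions for illustration.

    import static org.quartz.JobBuilder.newJob;
    import static org.quartz.TriggerBuilder.newTrigger;

    import org.quartz.Job;
    import org.quartz.JobDetail;
    import org.quartz.JobExecutionContext;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.Trigger;

    public class EchoJobScheduling {

        // Placeholder standing in for the job class shown above (hypothetical).
        public static class EchoJob implements Job {
            @Override
            public void execute(JobExecutionContext context) {
                System.out.println(context.getMergedJobDataMap().getWrappedMap());
            }
        }

        public void schedule(Scheduler scheduler) throws SchedulerException {
            JobDetail job = newJob(EchoJob.class)
                    .withIdentity("echoJob", "demo")
                    .usingJobData("randomValue", 42)                       // job-level entry
                    .build();
            Trigger trigger = newTrigger()
                    .withIdentity("echoTrigger", "demo")
                    .usingJobData("createTime", "2024-01-01 00:00:00")     // trigger-level entry
                    .startNow()
                    .build();
            // getMergedJobDataMap() in the job will contain both entries
            scheduler.scheduleJob(job, trigger);
        }
    }
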
@Override
public void executeInContext(final JobExecutionContext jobCtx, final InterruptionStatus interruption)
throws JobExecutionException {
final long snifferId = SnifferJobManager.getSnifferId(jobCtx.getJobDetail().getKey());
final long logSourceId = SnifferJobManager.getLogSourceId(jobCtx.getJobDetail().getKey());
final ScheduleInfo scheduleInfo = scheduleInfoAccess.getScheduleInfo(snifferId);
scheduleInfo.setLastFireTime(new Date());
scheduleInfoAccess.updateScheduleInfo(snifferId, scheduleInfo);
logger.debug("Start sniffing job processing for sniffer with id {} and log source {}", snifferId, logSourceId);
final Sniffer sniffer = snifferPersistence.getSniffer(snifferId);
if (sniffer == null) {
logger.error("Sniffer not found for id {}, stopping cron job for log source {}", snifferId, logSourceId);
deleteJob(jobCtx.getScheduler(), snifferId);
return;
}
final LogSource<LogRawAccess<? extends LogInputStream>> logSource = logSourceProvider
.getSourceById(logSourceId);
if (logSource == null) {
logger.error("Log source not found for id {}, stopping cron job for sniffer {}", logSourceId, snifferId);
deleteJob(jobCtx.getScheduler(), snifferId);
return;
}
try {
sniff(sniffer, logSource, interruption);
} catch (final Exception e) {
logger.error("Failed sniffing in context of sniffer={} and log source={}", sniffer, logSource);
throw new JobExecutionException("Failed sniffing", e, false);
} finally {
logger.debug("Stopped sniffing job processing for sniffer with id {} and log source {}", snifferId,
logSourceId);
}
}
@Override
public void execute(JobExecutionContext ctx) {
ProjectLogger.log(
"ChannelRegistrationScheduler:execute: Running channel registration Scheduler Job at: "
+ Calendar.getInstance().getTime()
+ " triggered by: "
+ ctx.getJobDetail().toString(),
LoggerEnum.INFO.name());
Request request = new Request();
request.setOperation(BackgroundOperations.registerChannel.name());
Response response =
cassandraOperation.getRecordById(
JsonKey.SUNBIRD, JsonKey.SYSTEM_SETTINGS_DB, JsonKey.CHANNEL_REG_STATUS_ID);
List<Map<String, Object>> responseList =
(List<Map<String, Object>>) response.get(JsonKey.RESPONSE);
if (null != responseList && !responseList.isEmpty()) {
Map<String, Object> resultMap = responseList.get(0);
ProjectLogger.log(
"value for CHANNEL_REG_STATUS_ID (003) from SYSTEM_SETTINGS_DB is : "
+ (String) resultMap.get(JsonKey.VALUE));
if (StringUtils.isBlank((String) resultMap.get(JsonKey.VALUE))
&& !Boolean.parseBoolean((String) resultMap.get(JsonKey.VALUE))) {
ProjectLogger.log(
"calling ChannelRegistrationActor from ChannelRegistrationScheduler execute method.");
tellToBGRouter(request);
}
} else {
ProjectLogger.log(
"calling ChannelRegistrationActor from ChannelRegistrationScheduler execute method, "
+ "entry for CHANNEL_REG_STATUS_ID (003) is null.");
tellToBGRouter(request);
}
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
try {
((AWSDatabaseHolder) context.getScheduler().getContext().get(DB_KEY)).rebuild();
} catch (SchedulerException e) {
throw new JobExecutionException(e);
}
}
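
The object retrieved above must have been stored under DB_KEY in the SchedulerContext before the job fires. A minimal sketch of that setup side; the key value and the holder parameter (typed as Object here so the sketch stands alone, in place of the project's AWSDatabaseHolder) are assumptions.

    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.impl.StdSchedulerFactory;

    public class DatabaseRefreshSetup {
        static final String DB_KEY = "dbHolder";   // must match the key the job reads

        public Scheduler createScheduler(Object dbHolder) throws SchedulerException {
            Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
            // make the holder visible to every job via context.getScheduler().getContext()
            scheduler.getContext().put(DB_KEY, dbHolder);
            scheduler.start();
            return scheduler;
        }
    }
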
@Override
protected void executeInternal(final JobExecutionContext jobContext) throws JobExecutionException {
try {
getJobContextHolder(jobContext).updateSearchIndex();
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
throw new JobExecutionException(e);
}
}
public void execute(JobExecutionContext arg0) throws JobExecutionException{
try {
runner(arg0);
} catch (Exception e) {
e.printStackTrace();
log.debug("Exception ("+this.getClass()+") ....................................> "+e.toString());
throw new JobExecutionException(e);
}
}
@Override
protected void executeInternal(JobExecutionContext context)
throws JobExecutionException {
// load jobId
JobKey jobKey = context.getTrigger().getJobKey();
Integer jobId = Integer.valueOf(jobKey.getName());
// trigger
XxlJobTrigger.trigger(jobId);
}
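
This job recovers a numeric job id from the trigger's job key name, so the job must have been scheduled with that id as its key name. A minimal sketch of scheduling a job that way; the group name, cron expression handling, and the TriggerByIdJob placeholder class are illustrative.

    import static org.quartz.CronScheduleBuilder.cronSchedule;
    import static org.quartz.JobBuilder.newJob;
    import static org.quartz.TriggerBuilder.newTrigger;

    import org.quartz.Job;
    import org.quartz.JobDetail;
    import org.quartz.JobExecutionContext;
    import org.quartz.JobExecutionException;
    import org.quartz.Scheduler;
    import org.quartz.SchedulerException;
    import org.quartz.Trigger;

    public class JobIdKeyScheduling {

        // Stub standing in for the job class above: it recovers the id from the trigger's job key.
        public static class TriggerByIdJob implements Job {
            @Override
            public void execute(JobExecutionContext context) throws JobExecutionException {
                Integer jobId = Integer.valueOf(context.getTrigger().getJobKey().getName());
                System.out.println("Triggering job " + jobId);
            }
        }

        public void schedule(Scheduler scheduler, int jobId, String cron) throws SchedulerException {
            JobDetail job = newJob(TriggerByIdJob.class)
                    .withIdentity(String.valueOf(jobId), "xxl-job")   // key name is the stringified job id
                    .build();
            Trigger trigger = newTrigger()
                    .withIdentity(String.valueOf(jobId), "xxl-job")
                    .withSchedule(cronSchedule(cron))
                    .build();
            scheduler.scheduleJob(job, trigger);
        }
    }
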
@Override
protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
StopWatch stopWatch = StopWatch.createStarted();
log.info("定时[发送邮件消息]任务开始执行......");
stopWatch.stop();
log.info("总计执行时间:{}", stopWatch.getTime());
}
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
ExternalContent externalContent=new ExternalContent();
externalContent.setAuthenticatedUserId(Authentication.getSystemId());
externalContent.setConnectionManagement("General");
processEngine.setExternalContent(externalContent);
//processEngine.getProcessEngineConfiguration().setConnectionManagement("General");
try {
System.out.println("=====定时任务启动 " + new Date() + " =====");
System.out.println("=====任务参数为:" + new Date() + " =====");
if(jobExecutionContext.getJobDetail().getJobDataMap()!=null)
for (String mapKey : jobExecutionContext.getJobDetail().getJobDataMap().keySet()) {
System.out.println("=====参数 "+mapKey+" : "+jobExecutionContext.getJobDetail().getJobDataMap().get(mapKey) + " =====");
}
executeJob(jobExecutionContext);
processEngine.commitConnection();
System.out.println("=====定时任务启动成功! "+new Date()+ " =====");
} catch (Exception e) {
processEngine.rollBackConnection();
e.printStackTrace();
System.out.println("=====定时任务启动失败! "+new Date()+ " =====");
}
finally{
processEngine.contextClose(true, true);
}
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
JobDetail detail = context.getJobDetail();
JobDataMap data = context.getJobDetail().getJobDataMap();
String name = detail.getKey().getName();
String desc = detail.getDescription();
System.err.println("Job fired: " + name + " (" + desc + ")");
if (data != null && data.size() > 0) {
for (String key : data.keySet()) {
System.err.println(" " + key + " = " + data.getString(key));
}
}
}
@Test
public void executeTest() {
JobExecutionContext mockNetworkManager = Mockito.mock(JobExecutionContext.class);
try {
HTLCJob htlcJob = new HTLCJob();
HTLCTaskFactory htlcTaskFactory = new HTLCTaskFactory();
JobDetail jobDetail =
htlcTaskFactory.loadHTLCJobDetail("HTLC", "HTLC", getHTLCResourcePair());
Mockito.when(mockNetworkManager.getJobDetail()).thenReturn(jobDetail);
htlcJob.execute(mockNetworkManager);
} catch (Exception e) {
Assert.assertTrue(true);
}
}