下面列出了 org.quartz.Scheduler#triggerJob() 的实例代码;也可以点击链接到 GitHub 查看源代码,或在右侧发表评论。
/**
 * Fires the given scheduled job immediately.
 *
 * @param scheduler the Quartz scheduler to fire the job on
 * @param job       the job definition, passed to the execution via the data map
 * @return 1 when the trigger request was submitted, 0 when the scheduler failed
 */
public static int run(Scheduler scheduler, SysJob job)
{
    try
    {
        // Hand the job definition to the executing job instance through the data map.
        JobDataMap dataMap = new JobDataMap();
        dataMap.put(ScheduleConstants.TASK_PROPERTIES, job);
        scheduler.triggerJob(getJobKey(job.getJobId()), dataMap);
        return 1;
    }
    catch (SchedulerException e)
    {
        log.error("run 异常:", e);
        return 0;
    }
}
/**
 * Fires the given job immediately.
 *
 * @param scheduler the Quartz scheduler to fire the job on
 * @param job       the job definition, made available to the execution via the data map
 * @return 1 when the trigger request was submitted, 0 when the scheduler failed
 */
public static int run(Scheduler scheduler, Job job)
{
    int result = 0;
    // Expose the job definition to the running job through its data map.
    JobDataMap dataMap = new JobDataMap();
    dataMap.put(ScheduleConstants.TASK_PROPERTIES, job);
    try
    {
        scheduler.triggerJob(getJobKey(job.getJobId()), dataMap);
        result = 1;
    }
    catch (SchedulerException e)
    {
        log.error("run 异常:", e);
    }
    return result;
}
/**
 * Verifies that uploads above 4 MB work both before and after the webscript
 * temp-folder cleaner job has run (i.e. the cleaner must not delete the temp folder).
 */
public void testLargeContentRequest() throws Exception
{
    authenticationComponent.setCurrentUser(USER_ONE);

    // Build a 5 MB zero-filled payload — deliberately larger than the 4 MB threshold.
    byte[] payload = new byte[5 * 1024 * 1024];
    Arrays.fill(payload, (byte) 0);

    // Check that a file larger than 4 MB can be uploaded.
    Response response = sendRequest(new PutRequest("/test/largecontenttest", payload, "text/plain"), STATUS_OK);
    assertEquals(SUCCESS, response.getContentAsString());

    // Fire the webscript temp-file cleaner job.
    CronTrigger cleanerTrigger = (CronTrigger) getServer().getApplicationContext().getBean("webscripts.tempFileCleanerTrigger");
    Scheduler scheduler = (Scheduler) getServer().getApplicationContext().getBean("schedulerFactory");
    scheduler.triggerJob(cleanerTrigger.getJobKey());

    // A second large upload must still succeed — the cleaner must not have removed the temp folder.
    response = sendRequest(new PutRequest("/test/largecontenttest", payload, "text/plain"), STATUS_OK);
    assertEquals(SUCCESS, response.getContentAsString());
}
/**
 * Registers and immediately fires a one-off job that imports the given zip archive.
 *
 * @param file   path of the zip archive to import
 * @param siteId optional target site id; omitted from the job data when null
 */
private void scheduleImport(String file, String siteId) {
    JobDataMap data = new JobDataMap();
    data.put("zip", file);
    if (siteId != null) {
        data.put("siteId", siteId);
    }

    JobDetail detail = JobBuilder.newJob(ImportJob.class)
            .withIdentity("Import Job")
            .setJobData(data)
            .build();

    Scheduler quartz = schedulerManager.getScheduler();
    try {
        quartz.addJob(detail, true, true);
        quartz.triggerJob(detail.getKey());
    } catch (SchedulerException e) {
        log.warn("Problem adding job to scheduler to import "+ file, e);
    }
}
/**
 * Adds a single-shot import job for the given zip archive and triggers it right away.
 *
 * @param file   path of the zip archive to import
 * @param siteId target site id, or null to leave it out of the job data
 */
private void scheduleImport(String file, String siteId) {
    JobDataMap importData = new JobDataMap();
    importData.put("zip", file);
    if (siteId != null) {
        importData.put("siteId", siteId);
    }
    JobDetail importJob =
            JobBuilder.newJob(ImportJob.class).withIdentity("Import Job").setJobData(importData).build();
    try {
        Scheduler scheduler = schedulerManager.getScheduler();
        scheduler.addJob(importJob, true, true);
        scheduler.triggerJob(importJob.getKey());
    } catch (SchedulerException e) {
        log.warn("Problem adding job to scheduler to import "+ file, e);
    }
}
@POST
@Path("/cockpitData")
@Produces(MediaType.APPLICATION_JSON)
/**
 * Schedules and immediately triggers an export job for the given cockpit document.
 *
 * @param documentExportConf export configuration (document id/label, export type, parameters)
 * @return HTTP 200 whose entity is the name of the scheduled export job
 */
public Response exportCockpitDocumentWidgetData(DocumentExportConf documentExportConf) {
    logger.debug("IN");
    logger.debug(String.format("document id: %s", documentExportConf.getDocumentId()));
    logger.debug(String.format("document label: %s", documentExportConf.getDocumentLabel()));
    logger.debug(String.format("export type: %s", documentExportConf.getExportType()));
    logger.debug(String.format("parameters: %s", documentExportConf.getParameters()));
    JobDetail exportJob = new CockpitDataExportJobBuilder().setDocumentExportConf(documentExportConf).setLocale(request.getLocale())
            .setUserProfile(UserProfileManager.getProfile()).build();
    logger.debug("Created export job");
    try {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.addJob(exportJob, true);
        scheduler.triggerJob(exportJob.getName(), exportJob.getGroup());
        logger.debug("Export job triggered ");
    } catch (SchedulerException e) {
        // BUG FIX: the original used %d with the document *label* (not a number), which
        // throws IllegalFormatConversionException while reporting the error; use %s.
        // Also preserve the original exception as the cause when rethrowing.
        String msg = String.format("Error during scheduling of export job for cockpit document %s", documentExportConf.getDocumentLabel());
        logger.error(msg, e);
        throw new SpagoBIRuntimeException(msg, e);
    }
    logger.debug("OUT");
    return Response.ok().entity(exportJob.getName()).build();
}
/**
 * When auto-run is enabled (config flag unset or true), registers each configured
 * startup job as a non-durable job and triggers it once.
 */
public void init() {
    if (config == null || serverConfigurationService.getBoolean(config, false)) {
        log.info("AutoRun running");
        Scheduler scheduler = schedulerManager.getScheduler();
        for (JobBeanWrapper job : startup) {
            try {
                JobDataMap jobData = new JobDataMap();
                jobData.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
                jobData.put(JobBeanWrapper.JOB_NAME, job.getJobName());
                JobDetail jobDetail = JobBuilder.newJob(job.getJobClass())
                        .withIdentity(job.getJobName(), null)
                        .setJobData(jobData)
                        .build();
                // Non durable job that will get removed
                scheduler.addJob(jobDetail, true, true);
                scheduler.triggerJob(jobDetail.getKey());
                log.info("Triggered job: {}", job.getJobName());
            } catch (SchedulerException se) {
                // BUG FIX: log the name of the job that failed, not the whole startup list.
                log.warn("Failed to run job: {}", job.getJobName(), se);
            }
        }
    }
}
/**
 * When auto-run is enabled (config flag unset or true), registers each configured
 * startup job as a non-durable job and triggers it once.
 */
public void init() {
    if (config == null || serverConfigurationService.getBoolean(config, false)) {
        log.info("AutoRun running");
        Scheduler scheduler = schedulerManager.getScheduler();
        for (JobBeanWrapper job : startup) {
            try {
                JobDataMap jobData = new JobDataMap();
                jobData.put(JobBeanWrapper.SPRING_BEAN_NAME, job.getBeanId());
                jobData.put(JobBeanWrapper.JOB_NAME, job.getJobName());
                JobDetail jobDetail = JobBuilder.newJob(job.getJobClass())
                        .withIdentity(job.getJobName(), null)
                        .setJobData(jobData)
                        .build();
                // Non durable job that will get removed
                scheduler.addJob(jobDetail, true, true);
                scheduler.triggerJob(jobDetail.getKey());
                log.info("Triggered job: {}", job.getJobName());
            } catch (SchedulerException se) {
                // BUG FIX: log the name of the job that failed, not the whole startup list.
                log.warn("Failed to run job: {}", job.getJobName(), se);
            }
        }
    }
}
/**
 * Fires the job bound to the named trigger once, reusing the trigger's own data map.
 *
 * @param schedulerName name of the scheduler holding the trigger
 * @param triggerName   trigger name
 * @param triggerGroup  trigger group
 * @throws SchedulerException if the scheduler rejects the trigger request
 */
public void runTrigger(String schedulerName, String triggerName, String triggerGroup) throws SchedulerException {
    Trigger trigger = getTrigger(schedulerName, triggerName, triggerGroup);
    Assert.notNull(trigger, "trigger is not exist");
    this.getAssertScheduler(schedulerName).triggerJob(trigger.getJobKey(), trigger.getJobDataMap());
}
/**
 * Schedules and immediately fires a job that deletes the current user's old exports.
 *
 * @throws SchedulerException In case of error during scheduling
 */
private void scheduleCleanUp() throws SchedulerException {
    UserProfile profile = UserProfileManager.getProfile();

    JobDataMap dataMap = new JobDataMap();
    dataMap.put(ExportDeleteOldJob.MAP_KEY_RESOURCE_PATH, SpagoBIUtilities.getResourcePath());
    dataMap.put(ExportDeleteOldJob.MAP_KEY_USER_PROFILE, profile);

    // Quartz 1.x style JobDetail, keyed per user so concurrent users don't collide.
    JobDetail cleanUpJob = new JobDetail(
            String.format("delete-old-export-for-%s", profile.getUserId()),
            "delete-old-export",
            ExportDeleteOldJob.class);
    cleanUpJob.setDescription(String.format("Delete old exports for user %s", profile.getUserId()));
    cleanUpJob.setJobDataMap(dataMap);

    Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
    scheduler.addJob(cleanUpJob, true);
    scheduler.triggerJob(cleanUpJob.getName(), cleanUpJob.getGroup());
}
@PUT
@RolesAllowed({"IbisDataAdmin", "IbisAdmin", "IbisTester"})
@Path("/schedules/{groupName}/job/{jobName}")
@Relation("schedules")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
/**
 * Pauses, resumes, or triggers the identified job depending on the "action" entry
 * in the request body.
 *
 * @param jobName   name component of the job key
 * @param groupName group component of the job key
 * @param json      request body; must contain an "action" entry of PAUSE, RESUME, or TRIGGER
 * @return HTTP 200 on success
 * @throws ApiException when no valid action is supplied or the scheduler call fails
 */
public Response trigger(@PathParam("jobName") String jobName, @PathParam("groupName") String groupName, LinkedHashMap<String, Object> json) throws ApiException {
	Scheduler scheduler = getScheduler();

	// NOTE(review): a missing init parameter makes getInitParameter return null, so this
	// concatenates the literal "null" — confirm whether that is intended for the audit line.
	String commandIssuedBy = servletConfig.getInitParameter("remoteHost");
	commandIssuedBy += servletConfig.getInitParameter("remoteAddress");
	commandIssuedBy += servletConfig.getInitParameter("remoteUser");
	if(log.isInfoEnabled()) log.info("trigger job jobName [" + jobName + "] groupName [" + groupName + "] " + commandIssuedBy);

	JobKey jobKey = JobKey.jobKey(jobName, groupName);

	String action = ""; //PAUSE,RESUME,TRIGGER
	for (Entry<String, Object> entry : json.entrySet()) {
		String key = entry.getKey();
		if(key.equalsIgnoreCase("action")) {//Start or stop an adapter!
			action = (String) entry.getValue();
		}
	}

	try {
		// BUG FIX: the error message advertises PAUSE,RESUME,TRIGGER (uppercase) but the
		// match was case-sensitive lowercase; accept the action in any case.
		if("pause".equalsIgnoreCase(action)) {
			scheduler.pauseJob(jobKey);
		}
		else if("resume".equalsIgnoreCase(action)) {
			scheduler.resumeJob(jobKey);
		}
		else if("trigger".equalsIgnoreCase(action)) {
			scheduler.triggerJob(jobKey);
		}
		else {
			throw new ApiException("no (valid) action provided! Expected one of PAUSE,RESUME,TRIGGER");
		}
	} catch (SchedulerException e) {
		throw new ApiException("Failed to "+action+" job", e);
	}
	return Response.status(Response.Status.OK).build();
}
@Test
public void testAbilityToFireImmediatelyWhenStartedBeforeWithTriggerJob() throws Exception {
    // Timestamps recorded by the job on execution; barrier synchronizes test and job thread.
    List<Long> executionTimes = Collections.synchronizedList(new ArrayList<Long>());
    CyclicBarrier syncPoint = new CyclicBarrier(2);

    Scheduler scheduler = createScheduler("testAbilityToFireImmediatelyWhenStartedBeforeWithTriggerJob", 5);
    scheduler.getContext().put(BARRIER, syncPoint);
    scheduler.getContext().put(DATE_STAMPS, executionTimes);
    scheduler.start();

    Thread.yield();

    JobDetail durableJob = JobBuilder.newJob(TestJobWithSync.class).withIdentity("job1").storeDurably().build();
    scheduler.addJob(durableJob, false);

    long startMillis = System.currentTimeMillis();
    scheduler.triggerJob(durableJob.getKey());

    syncPoint.await(TEST_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    scheduler.shutdown(true);

    long firedMillis = executionTimes.get(0);
    // This is dangerously subjective! but what else to do?
    assertTrue("Immediate trigger did not fire within a reasonable amount of time.",
            (firedMillis - startMillis < 7000L));
}
/**
 * Fires the identified job once, immediately.
 *
 * @param scheduler the Quartz scheduler
 * @param jobName   name component of the job key
 * @param jobGroup  group component of the job key
 * @throws SchedulerException if the trigger request fails
 */
public static void runOnce(Scheduler scheduler, String jobName, String jobGroup) throws SchedulerException {
    scheduler.triggerJob(JobKey.jobKey(jobName, jobGroup));
}