APScheduler 相关代码(部分内容)
# NOTE(review): "redis_connet" is a typo for "redis_connect"; this client is
# never used in the visible code — RedisJobStore() below opens its own
# connection (defaulting to localhost:6379 db 0) — TODO confirm it can go.
redis_connet = redis.Redis(host='localhost', port=6379, db=0)
# Job store persisted in Redis, shared by every scheduler that points at it.
# NOTE(review): cronKaifu() below builds ANOTHER BackgroundScheduler over this
# same Redis store on every call; several live schedulers polling one store is
# what makes a single 'date' job fire 2-3 times (see the log further down).
jobstores = {
'redis': RedisJobStore(),
}
executors = {
'default': ThreadPoolExecutor(4)
}
# Module-level scheduler — never .start()ed in the visible code. This should be
# the ONE scheduler the whole process uses.
sched = BackgroundScheduler(jobstores=jobstores, executors=executors)
# Job callback run in a scheduler worker thread when the 'date' trigger fires:
# performs the "open in" step for one server. Body elided in the original
# post ("....."); presumably executes `cmd` against `serverip` — TODO confirm.
def open_in(servername,serverip,cmd):
print '{0}--open in ....'.format(servername)
.....
# Job callback run in a scheduler worker thread when the 'date' trigger fires:
# performs the "open out" step for one server. Body elided in the original
# post ("....."); presumably executes `cmd` against `serverip` — TODO confirm.
def open_out(servername,serverip,cmd):
print '{0}---open out ....'.format(servername)
.....
def cronKaifu(starttime,servername,serverip,cmd_in,cmd_out):
....
jobstores = {
'redis': RedisJobStore(),
}
executors = {
'default': ThreadPoolExecutor(1)
# 'processpool': ProcessPoolExecutor(3)
}
sched = BackgroundScheduler(jobstores=jobstores, executors=executors)
sched.add_job(func=open_in,args=(servername,serverip,cmd_in,),trigger='date',next_run_time=open_in_time,jobstore='redis',id='{0}_in_{1}'.format(servername,sendtime_timestamp_in))
print sched.get_jobs()
sched.add_job(func=open_out,args=(servername,serverip,cmd_out,),trigger='date',next_run_time=open_out_time,jobstore='redis',id='{0}_out_{1}'.format(servername,sendtime_timestamp_out))
print sched.get_jobs()
sched.start()
views.py 里面调用这个任务
# Django view (from views.py): handles the server-opening-time form and calls
# cronKaifu() to queue the open_in/open_out jobs, then re-renders the page.
# Bodies elided in the original post ("...."); presumably the elided parts
# parse timeArray, server_name, server_ip and the two commands from the
# request — TODO confirm against the real views.py.
def kaifu_time(request):
....
....
cronKaifu(timeArray,server_name,server_ip,open_in_cmd,open_out_cmd)
......
....
return render(request, 'mt/kaifu_time.html', locals())
添加任务后,两次打印 get_jobs()内容
[<Job (id=mt_ios999game_in_1516005540000 name=open_in)>]
[<Job (id=mt_ios999game_in_1516005540000 name=open_in)>, <Job (id=mt_ios999game_out_1516006260000 name=open_out)>]
到点后,id=mt_ios999game_in_1516005540000,会被执行 2 次,有时候会是 3 次,但有时候又是正常的。
[15/Jan/2018 15:02:22] "POST /mt/kaifu_time/ HTTP/1.1" 302 0
[15/Jan/2018 15:02:22] "GET /mt/kaifu_time HTTP/1.1" 301 0
[15/Jan/2018 15:02:22] "GET /mt/kaifu_time/ HTTP/1.1" 200 19539
mt_ios999game--open in ....
mt_ios999game--open in .... <==这里的打印两次
Exception in thread APScheduler:
Traceback (most recent call last):
File "/usr/local/lib/python2.7/threading.py", line 801, in __bootstrap_inner
self.run()
File "/usr/local/lib/python2.7/threading.py", line 754, in run
self.__target(*self.__args, **self.__kwargs)
File "/qzdata1/python_env/LazyOps/lib/python2.7/site-packages/apscheduler/schedulers/blocking.py", line 30, in _main_loop
wait_seconds = self._process_jobs()
File "/qzdata1/python_env/LazyOps/lib/python2.7/site-packages/apscheduler/schedulers/base.py", line 981, in _process_jobs
self.remove_job(job.id, jobstore_alias)
File "/qzdata1/python_env/LazyOps/lib/python2.7/site-packages/apscheduler/schedulers/base.py", line 613, in remove_job
raise JobLookupError(job_id)
JobLookupError: u'No job by the id of mt_ios999game_in_1516005540000 was found'
我把两个 add_job()位置互掉后,即先添加 add_job(func=open_out...) 再添加 add_job(func=open_in..)
然后 open_out 会被执行两次。
这样的问题,我应如何修改。
感觉毫无逻辑出现这问题,一次性任务,会被重复执行,而且是同一时间的。
这是一个专为移动设备优化的页面(即为了让你能够在 Google 搜索结果里秒开这个页面),如果你希望参与 V2EX 社区的讨论,你可以继续到 V2EX 上打开本讨论主题的完整版本。
V2EX 是创意工作者们的社区,是一个分享自己正在做的有趣事物、交流想法,可以遇见新朋友甚至新机会的地方。
V2EX is a community of developers, designers and creative people.