from datetime import timedelta
from celery.schedules import crontab
import os
from peewee import *
import sqlite3
from utils.DBModels.ClientModels import ClientDB
from utils.DBModels.BaseModel import *
from utils.DBModels.ProjectBaseModel import *
# Read the TCP polling frequency for the active client from its project database.
try:
    # client_proxy is expected to be provided by the BaseModel wildcard import above.
    dbPath = os.path.join('../../../', 'db', 'client.db')
    clientDB = SqliteDatabase(dbPath)
    client_proxy.initialize(clientDB)
    clientDB.connect()
    name = ClientDB.getByName().value
    dbPath = os.path.join('../../../project', name, 'db', 'project.db')
    conn = sqlite3.connect(dbPath)
    cursor = conn.cursor()
    cursor.execute("select * from TCPSetting")
    allCon = cursor.fetchall()
    frequency = float(allCon[0][3])
    clientDB.close()
    conn.close()
except Exception as e:
    print(e)
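# Assumed safeguard (not in the original file): if the project database cannot be read
# above, `frequency` is never defined and the CELERYBEAT_SCHEDULE block below fails with
# a NameError. The 1.0-second default is an illustrative fallback, not a project value.
if 'frequency' not in globals():
    frequency = 1.0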
BROKER_URL = 'redis://127.0.0.1:6379'  # message broker
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/0'  # result backend
CELERY_TIMEZONE = 'Asia/Shanghai'  # timezone (defaults to UTC)
ENABLE_UTC = False
CELERY_IMPORTS = (  # task modules to import
    'protocol.Celery.MBTCPMaster.MBTCPMTask',
)
# WORKER_HIJACK_ROOT_LOGGER = False
CELERY_TASK_RESULT_EXPIRES = 5  # result expiry time (seconds)
CELERY_RESULT_SERIALIZER = 'json'  # result serialization format
# task serialization format
CELERY_TASK_SERIALIZER = 'msgpack'
# serialization types the worker accepts
CELERY_ACCEPT_CONTENT = ["msgpack", 'pickle', 'json']
CELERYD_FORCE_EXECV = True  # important: can prevent deadlocks in some cases
CELERYD_TASK_TIME_LIMIT = 60  # a task running longer than this is killed with SIGKILL
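# Note: these are the pre-4.0 uppercase setting names. If the project runs Celery 4+,
# the documented lowercase equivalents (assumed mapping, verify against the installed
# version) would be broker_url, result_backend, timezone, enable_utc, imports,
# result_expires, result_serializer, task_serializer, accept_content, task_time_limit
# and beat_schedule; old and new naming styles should not be mixed in one config module.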
# Periodic (beat) schedule
try:
    CELERYBEAT_SCHEDULE = {
        'add-every-0.1-seconds': {
            'task': 'protocol.Celery.MBTCPMaster.MBTCPMTask.readValues',
            'schedule': timedelta(seconds=frequency),  # run every `frequency` seconds (read from project.db)
            'args': ()  # task arguments
        }
    }
except Exception as e:
    print(e)
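# Usage sketch (assumptions: this module is importable as `celeryconfig` and the Celery
# app object lives alongside protocol.Celery.MBTCPMaster; adjust names to the real layout):
#
#     from celery import Celery
#     app = Celery('MBTCPMaster')
#     app.config_from_object('celeryconfig')  # picks up the uppercase settings above
#
# A worker with an embedded beat scheduler then runs readValues periodically, e.g.:
#
#     celery worker -A protocol.Celery.MBTCPMaster -B -l info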