|
|
from datetime import timedelta
|
|
|
from celery.schedules import crontab
|
|
|
|
|
|
import os
|
|
|
from peewee import *
|
|
|
import sqlite3
|
|
|
|
|
|
from utils.DBModels.ClientModels import ClientDB
|
|
|
from utils.DBModels.BaseModel import *
|
|
|
from utils.DBModels.ProjectBaseModel import *
|
|
|
|
|
|
# Read the MBTCP slave polling frequency from the per-client project database.
# Default is assigned first so `frequency` is always defined for the beat
# schedule below, even when either database is missing or unreadable.
frequency = 30.0  # seconds between polls; overwritten from TCPSetting when available

clientDB = None
conn = None
try:
    # Client registry DB, resolved relative to this module's location.
    dbPath = os.path.join('../../../', 'db', 'client.db')
    clientDB = SqliteDatabase(dbPath)

    # Bind the peewee proxy (from BaseModel's star import) to the concrete DB
    # so the Client models can run queries against it.
    client_proxy.initialize(clientDB)
    clientDB.connect()

    # The current client's name selects which project database to read.
    name = ClientDB.getByName().value
    dbPath = os.path.join('../../../project', name, 'db', 'project.db')

    conn = sqlite3.connect(dbPath)
    cursor = conn.cursor()
    cursor.execute("select * from TCPSetting")
    allCon = cursor.fetchall()

    # Column index 3 of the first TCPSetting row holds the poll interval in
    # seconds -- presumably; TODO confirm against the project DB schema.
    frequency = float(allCon[0][3])
except Exception as e:
    # Best-effort: this module must still import (with the default frequency)
    # even when the databases are absent, e.g. on a fresh install.
    print(e)
finally:
    # Close both handles regardless of where the lookup failed.
    if clientDB is not None:
        try:
            clientDB.close()
        except Exception:
            pass
    if conn is not None:
        conn.close()
|
|
|
|
|
|
# --- Celery configuration (old-style uppercase setting names) ---

BROKER_URL = 'redis://127.0.0.1:6379' # message broker (Redis)

CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/0' # result backend (Redis DB 0)

CELERY_TIMEZONE='Asia/Shanghai' # timezone for scheduling; Celery's default is UTC

ENABLE_UTC = False

CELERY_IMPORTS = ( # task modules imported by the worker at startup
'protocol.Celery.MBTCPSlave.MBTCPSTask',
)

# WORKER_HIJACK_ROOT_LOGGER = False

CELERY_TASK_RESULT_EXPIRES = 10 # seconds before stored task results expire

CELERY_RESULT_SERIALIZER = 'json' # serialization format for stored results

# Serialization format used when publishing task messages.
CELERY_TASK_SERIALIZER = 'msgpack'

# CELERYD_CONCURRENCY = 1 # number of concurrent worker processes

# Content types the worker will accept.
# NOTE(review): accepting 'pickle' allows arbitrary code execution by anyone
# who can publish to the broker -- confirm the broker is not reachable by
# untrusted clients, or drop 'pickle' from this list.
CELERY_ACCEPT_CONTENT = ["msgpack",'pickle','json']

CELERYD_FORCE_EXECV = True # very important: can prevent deadlocks in some cases

CELERYD_TASK_TIME_LIMIT = 200 # hard per-task runtime limit (seconds); tasks exceeding it are killed with SIGKILL
|
|
|
|
|
|
# 定时任务
|
|
|
# Periodic task schedule.  The poll interval comes from the project-DB lookup
# earlier in this module; if `frequency` was never defined there (the lookup
# failed), fall back to 30 seconds so CELERYBEAT_SCHEDULE is still defined
# and celery beat can start.  The original code left the schedule undefined
# on failure, which silently disabled all periodic tasks.
try:
    _beat_seconds = float(frequency)
except (NameError, TypeError, ValueError) as e:
    print(e)
    _beat_seconds = 30.0  # default poll interval in seconds

CELERYBEAT_SCHEDULE = {
    'add-every-0.1-seconds': {
        'task': 'protocol.Celery.MBTCPSlave.MBTCPSTask.readValues',
        'schedule': timedelta(seconds=_beat_seconds),  # run every `_beat_seconds` seconds
        'args': (),  # task function arguments
    },
}