Merge branch 'master' of https://e.coding.net/ctcdevteam/ehs/ehs_server

commit 8aabf1fca0
@@ -142,7 +142,8 @@ class MpointCache:
         try:
             mpoint = Mpoint.objects.get(code=code)
         except Exception:
-            return None
+            cache.set(key, {}, timeout=None)
+            return {}
         mpoint_data = MpointSerializer(instance=mpoint).data
         mpoint_data["last_data"] = {"last_val": None, "last_timex": None, "last_mrs": None}  # initialize
         if update_mplogx:
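This hunk switches the cache-miss path from returning None to caching an empty dict with no expiry, so repeated lookups of an unknown code stop hitting the database. A minimal, self-contained sketch of that negative-caching pattern, assuming Django's cache framework (the helper name and the load callback are illustrative, not from the source):

    from django.core.cache import cache

    def get_cached(key, load):
        data = cache.get(key)
        if data is not None:
            return data  # hit: either real data or the {} "not found" sentinel
        try:
            data = load()  # e.g. fetch and serialize the Mpoint
        except Exception:
            cache.set(key, {}, timeout=None)  # cache the miss permanently
            return {}
        cache.set(key, data, timeout=None)
        return data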
@@ -324,7 +325,7 @@ def insert_mplogx_item(code: str, val, timex: datetime, enp_mpoints_dict):
     """
     mc = MpointCache(code)
     mpoint_data = mc.data
-    if mpoint_data is None or not mpoint_data["enabled"]:
+    if mpoint_data in (None, {}) or not mpoint_data["enabled"]:
         return

     mpoint_interval = mpoint_data["interval"]
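Because MpointCache.data can now yield {} as a "code not found" sentinel (previous hunk), the guard checks membership in (None, {}) first; the short-circuiting `or` guarantees `mpoint_data["enabled"]` is only evaluated for a real dict, avoiding a KeyError on the empty sentinel. A standalone sketch of that behaviour:

    for mpoint_data in (None, {}, {"enabled": False}, {"enabled": True}):
        skip = mpoint_data in (None, {}) or not mpoint_data["enabled"]
        print(mpoint_data, "-> skip" if skip else "-> process")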
@@ -42,12 +42,10 @@ def get_current_and_previous_time():
 def db_insert_mplogx_batch(rows):
     for row in rows:
         _, tag_val, tag_code, tag_update = row
-        if cache.get("tag_code", None) is None:
-            continue
         insert_mplogx_item(tag_code, tag_val, make_aware(tag_update), {})

 @shared_task(base=CustomTask)
-def db_insert_mplogx():
+def db_insert_mplogx(limit:bool=True):
     """
     Transfer from the database into the hypertable
     """
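The new `limit` flag lets callers bypass the 400-row safety cap enforced in the next hunk, e.g. for a one-off backfill. Assuming the function stays a Celery shared_task as the decorator indicates, hypothetical invocations would look like:

    db_insert_mplogx()                   # scheduled run: cap enforced
    db_insert_mplogx.delay(limit=False)  # manual backfill: cap skipped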
@@ -58,7 +56,7 @@ def db_insert_mplogx():
         raise Exception("last_tag_id is None")
     cursor.execute("select count(id) from tag_value where id > %s", (last_tag_id))
     count = cursor.fetchone()[0]
-    if count > 400:
+    if limit and count > 400:
         raise Exception("db inset count > 400")
     cursor.execute(
         "select id, val, tag_code, update_time from tag_value where id > %s order by id, update_time", (last_tag_id, ))
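One detail worth flagging in this hunk: in the count query, `(last_tag_id)` is a bare parenthesized value, not a one-element tuple, while the second execute correctly passes `(last_tag_id, )`. Some DB-API drivers tolerate a scalar parameter, but the portable spelling is the tuple:

    last_tag_id = 7
    print(type((last_tag_id)))   # <class 'int'>   - just a grouped expression
    print(type((last_tag_id,)))  # <class 'tuple'> - what execute() expects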
@@ -85,7 +83,7 @@ def db_ins_mplogx():
     # raise Exception("db inset count > 400")
     # materials_name = ['水泥+P.C42.5 袋装', '水泥+P.O42.5R 袋装', '水泥+P.O42.5 散装', '水泥+P.O42.5 袋装', '水泥+P.O52.5 散装', '水泥+P.C42.5 散装', '水泥+P.O42.5R 散装']
     query = """
-        SELECT id, CONCAT('x', inv_name) AS inv_name, de_real_quantity, bill_date
+        SELECT id, de_real_quantity, CONCAT('x', inv_name) AS inv_name, bill_date
         FROM sa_weigh_view
         WHERE bill_date > %s
         ORDER BY id, bill_date
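The column swap is not cosmetic: `db_insert_mplogx_batch` unpacks each row positionally as `_, tag_val, tag_code, tag_update = row`, so the quantity must be the second column and the prefixed name the third. An illustrative row, with made-up values:

    row = (101, 42.5, 'xP.O42.5', '2024-01-01')  # id, de_real_quantity, inv_name, bill_date
    _, tag_val, tag_code, tag_update = row
    print(tag_val, tag_code, tag_update)  # 42.5 xP.O42.5 2024-01-01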
@@ -93,10 +91,9 @@ def db_ins_mplogx():
     cursor.execute(query, (bill_date,))
     rows = cursor.fetchall()  # save the data locally after fetching it
     if rows:
-        bill_date = rows[-1][0]
-        print(rows)
+        bill_date = rows[-1][-1]
         db_insert_mplogx_batch(rows)
-        update_sysconfig({'enm1': {'bill_date': bill_date}})
+        update_sysconfig({'enm1': {'bill_date': str(bill_date)}})


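Two fixes land here besides dropping the debug print: `rows[-1][-1]` checkpoints the last row's bill_date (the last column) instead of its id, and wrapping it in `str()` keeps the value serializable when update_sysconfig persists it, assuming that helper stores JSON. A standalone illustration of why str() matters for a datetime-typed bill_date:

    import datetime
    import json

    bill_date = datetime.datetime(2024, 1, 1, 8, 30)
    # json.dumps({'bill_date': bill_date}) would raise TypeError
    print(json.dumps({'enm1': {'bill_date': str(bill_date)}}))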
@@ -146,7 +143,7 @@ def cal_mpointstat_hour(mpointId: str, year: int, month: int, day: int, hour: int
     val_type = mpoint.val_type
-    if mpoint.type == Mpoint.MT_AUTO:
-        val = MpLogx.objects.filter(mpoint=mpoint, timex__gte=dt, timex__lt=dt_hour_n).aggregate(Sum("val"))["val__sum"] or 0
+    if mpoint.is_unit:
+        val = MpLogx.objects.filter(mpoint=mpoint, timex__gte=dt, timex__lt=dt_hour_n).aggregate(sum=Sum(f'val_{mpoint.val_type}'))["sum"] or 0
     else:
         mrs0 = MpLogx.objects.filter(mpoint=mpoint, timex__gte=dt_hour_p, timex__lte=dt).order_by("timex")
         mrs = MpLogx.objects.filter(mpoint=mpoint, timex__gte=dt, timex__lte=dt_hour_n).order_by("timex")
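The rewritten aggregate does two things: it reads a per-type column (`val_{mpoint.val_type}`) instead of a fixed `val`, and it names the result `sum` so the lookup key no longer depends on the field name. A sketch against the project's own model (MpLogx, mpoint, dt and dt_hour_n are assumed from the surrounding code):

    from django.db.models import Sum

    qs = MpLogx.objects.filter(mpoint=mpoint, timex__gte=dt, timex__lt=dt_hour_n)
    # the default result key would be f'val_{mpoint.val_type}__sum';
    # naming it 'sum' keeps the lookup stable regardless of val_type
    val = qs.aggregate(sum=Sum(f'val_{mpoint.val_type}'))["sum"] or 0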
@@ -66,10 +66,13 @@ def cal_exp_duration_sec(stlogId: str='', all=False, now: datetime=None):
     elif all:
         stlogs = StLog.objects.all()
     else:  # when no id is passed, default to updating unfinished records
-        stlogs = StLog.objects.filter(end_time=None)
+        stlogs = StLog.objects.filter(end_time=None)|StLog.objects.filter(duration_sec=None)
     if now is None:
         now = timezone.now()
     for stlog in stlogs:
+        if stlog.duration_sec is None and stlog.end_time:
+            stlog.duration_sec = (stlog.end_time-stlog.start_time).total_seconds()
+            stlog.save()
         is_shutdown_stlog = True
         if stlog.is_shutdown is False:
             is_shutdown_stlog = False
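The `|` on two querysets produces a single SQL query ORing both conditions, so records that ended without a computed duration are picked up too; the new loop body then backfills duration_sec once end_time is known. An equivalent spelling with Q objects, assuming the same StLog model:

    from django.db.models import Q

    stlogs = StLog.objects.filter(Q(end_time=None) | Q(duration_sec=None))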