feat: 新增tkx 测点 聚合

This commit is contained in:
zty 2024-12-05 15:22:33 +08:00
parent 15d37604ea
commit 918df6b345
2 changed files with 33 additions and 25 deletions

View File

@@ -16,8 +16,9 @@ from django.db import transaction
from datetime import datetime
from django.utils.timezone import localtime
from apps.enm.services import get_analyse_data_mgroups_duration
from django.db.models import Sum
import logging
myLogger = logging.getLogger('log')
class MpointViewSet(CustomModelViewSet):
"""
list:测点
@@ -207,15 +208,22 @@ class MpointStatViewSet(BulkCreateModelMixin, BulkDestroyModelMixin, CustomListM
task = cal_mpointstats_duration.delay(data["start_time"], data["end_time"])
return Response({"task_id": task.task_id})
@action(methods=["get"], detail=False, perms_map={"get": "*"})
def group_values(self, request, *args, **kwargs):
    """Aggregate mpoint stat records (测点统计数据聚合查询).

    Sums ``val`` grouped by mpoint, optionally sub-grouped by one extra
    model field supplied via the ``group_by`` query parameter.

    Query params:
        group_by: optional field name to add to the GROUP BY clause.

    Returns:
        Response with a list of dicts, each carrying ``mpoint__name``,
        ``total_val``, ``mpoint__nickname``, ``mpoint__unit`` and — when
        ``group_by`` is given — that field as well, so sub-grouped rows
        stay distinguishable.
    """
    qs = self.filter_queryset(self.get_queryset())
    group_by = request.query_params.get("group_by", None)
    group_by_fields = ["mpoint"]
    if group_by:
        group_by_fields.append(group_by)
    # Clear any default ordering: leftover ORDER BY fields would be added
    # to the GROUP BY by Django and silently split the aggregation groups.
    qs = qs.order_by()
    # values(*group_by_fields) defines the GROUP BY; annotate sums per group.
    output_fields = ["mpoint__name", "total_val", "mpoint__nickname", "mpoint__unit"]
    if group_by:
        # Keep the extra grouping field in the payload; without it, rows that
        # differ only in that field would be indistinguishable to the caller.
        output_fields.append(group_by)
    aggregate_qs = (
        qs.values(*group_by_fields)
        .annotate(total_val=Sum("val"))
        .values(*output_fields)
    )
    return Response(list(aggregate_qs))
class EnStatViewSet(CustomListModelMixin, CustomGenericViewSet):
"""

View File

@@ -107,19 +107,21 @@ def make_sflogs(mgroup: Mgroup, start_date: datetime.date, end_date: datetime.da
start_time -= datetime.timedelta(days=1)
total_sec = (end_time - start_time).total_seconds()
# 创建之前查询最后三条记录
all_teams = SfLog.objects.filter(mgroup=mgroup, start_time__lt=start_time).order_by('-start_time').values_list('team', flat=True)
# 如果最后一条记录的team_id为空则继续向前取三条
from itertools import islice
# 获取前三个非空值并且列表反转
last_teams = list(islice(all_teams, 3))
# 甲乙丙丁的id 是根据车间划分需要处理
last_teams_ids = transfer_id(last_teams)
new_team = get_team(team_ids=list(reversed(last_teams_ids)))
team_obj = Team.objects.filter(id=new_team).first() if new_team else None
leader_obj = getattr(team_obj, 'leader', None)
myLogger.info(f'mgrop_name: {mgroup.name},last_teams: {last_teams}, transfer_id: {last_teams_ids}, new_team: {new_team}---leader_obj---: {leader_obj}' )
# 在创建SfLog记录之前检查SfLog记录中的team_id 如果没有则生成team_id
SfLog.objects.filter(mgroup=mgroup, shift=shift, start_time=start_time, team__isnull=True).update(team=team_obj, leader=leader_obj)
if SfLog.objects.filter(mgroup=mgroup, shift=shift, start_time=start_time, team__isnull=True):
all_teams = SfLog.objects.filter(mgroup=mgroup, start_time__lt=start_time).order_by('-start_time').values_list('team', flat=True)
# 如果最后一条记录的team_id为空则继续向前取三条
from itertools import islice
# 获取前三并且列表反转
last_teams = list(islice(all_teams, 3))
# 甲乙丙丁的id 是根据车间划分需要处理
last_teams_ids = transfer_id(last_teams)
new_team = get_team(team_ids=list(reversed(last_teams_ids)))
team_obj = Team.objects.filter(id=new_team).first() if new_team else None
leader_obj = getattr(team_obj, 'leader', None)
myLogger.info(f'mgrop_name: {mgroup.name},last_teams: {last_teams[::-1]}, transfer_id: {last_teams_ids[::-1]}, team_name: {team_obj.name}---new_team: {new_team}---leader_obj---: {leader_obj}' )
SfLog.objects.filter(mgroup=mgroup, shift=shift, start_time=start_time, team__isnull=True).update(team=team_obj, leader=leader_obj)
else:
pass
# 创建SfLog记录
SfLog.objects.get_or_create(mgroup=mgroup, shift=shift, start_time=start_time, defaults={
"mgroup": mgroup,
@@ -130,8 +132,6 @@ def make_sflogs(mgroup: Mgroup, start_date: datetime.date, end_date: datetime.da
"total_sec_now": total_sec,
"total_sec": total_sec,
"create_by": create_by,
"team": team_obj if team_obj else None,
"leader": leader_obj,
})
current_date = current_date + datetime.timedelta(days=1)