import math from django.db import transaction from rest_framework.decorators import action from rest_framework.exceptions import ParseError from rest_framework.mixins import DestroyModelMixin, ListModelMixin, UpdateModelMixin, CreateModelMixin from rest_framework.response import Response from rest_framework.serializers import Serializer from django.db.models import Sum from django.utils import timezone from apps.system.models import User from apps.mtm.models import Material, Process, Route, Mgroup, RoutePack from apps.utils.viewsets import CustomGenericViewSet, CustomModelViewSet from apps.utils.mixins import CustomListModelMixin, BulkCreateModelMixin, BulkDestroyModelMixin from .filters import StLogFilter, SfLogFilter, WMaterialFilter, MlogFilter, HandoverFilter, MlogbFilter, BatchStFilter from .models import (SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb, Mlogbw, AttLog, OtherLog, Fmlog, BatchSt) from .serializers import (SflogExpSerializer, SfLogSerializer, StLogSerializer, WMaterialSerializer, MlogRevertSerializer, MlogSerializer, MlogRelatedSerializer, DeptBatchSerializer, HandoverSerializer, HandoverUpdateSerializer, GenHandoverSerializer, GenHandoverWmSerializer, MlogAnaSerializer, AttLogSerializer, OtherLogSerializer, MlogInitSerializer, MlogChangeSerializer, MlogbDetailSerializer, MlogbInSerializer, MlogbInUpdateSerializer, MlogbOutUpdateSerializer, FmlogSerializer, FmlogUpdateSerializer, BatchStSerializer, MlogbwCreateUpdateSerializer, HandoverMgroupSerializer) from .services import mlog_submit, handover_submit, mlog_revert from apps.wpm.services import mlog_submit_validate, generate_new_batch from apps.wf.models import State from apps.wpmw.models import Wpr from apps.qm.models import QctMat, Ftest from apps.enm.models import EnStat from django.db.models import Q # Create your views here. 
class StLogViewSet(CustomModelViewSet):
    """
    list: Downtime / exception records

    Downtime / exception records.
    """
    queryset = StLog.objects.all()
    serializer_class = StLogSerializer
    select_related_fields = ['mgroup']
    filterset_class = StLogFilter
    ordering = ['-start_time']

    def destroy(self, request, *args, **kwargs):
        """Delete a record; shutdown records longer than 60s are protected."""
        obj: StLog = self.get_object()
        # Short shutdowns (<= 60s) may still be deleted freely.
        if obj.is_shutdown and obj.duration_sec > 60:
            raise ParseError('停机记录不可删除')
        return super().destroy(request, *args, **kwargs)


class SfLogViewSet(UpdateModelMixin, CustomListModelMixin, DestroyModelMixin, CustomGenericViewSet):
    """
    list: Shift duty records

    Shift duty records.
    """
    perms_map = {'get': '*', 'put': 'sflog.update', 'delete': 'sflog.delete'}
    queryset = SfLog.objects.all()
    serializer_class = SfLogSerializer
    select_related_fields = ['mgroup', 'shift', 'team', 'leader']
    filterset_class = SfLogFilter
    search_fields = ['note']
    ordering = ['-start_time']

    def add_info_for_list(self, data):
        """Attach per-record energy/production stats (``enstat_info``) to list output."""
        sflog_ids = [item['id'] for item in data]
        stats = EnStat.objects.filter(sflog_id__in=sflog_ids, type='sflog').values(
            'sflog_id', 'total_production', 'qua_data', 'production_hour',
            'elec_consume_unit', 'run_rate', 'production_cost_unit')
        # Index by sflog id for O(1) lookup while decorating rows.
        stats_by_id = {row['sflog_id']: row for row in stats}
        for item in data:
            if item['id'] in stats_by_id:
                item.update({"enstat_info": stats_by_id[item['id']]})
        return data

    @action(methods=['get'], detail=True, perms_map={'get': '*'})
    def init_test(self, request, pk=None):
        """Initialize quality-test entry rows

        Ensures a QuaStat row exists for every (test material, test item) pair
        of this shift log's mgroup, then returns the full ordered set.
        """
        from apps.qm.models import QuaStat, TestItem
        from apps.qm.serializers import QuaStatSerializer
        obj = self.get_object()
        test_materials = Material.objects.filter(
            id__in=obj.mgroup.test_materials).order_by('sort', '-create_time')
        for material in test_materials:
            testitems = TestItem.objects.filter(
                id__in=material.testitems).order_by('sort', '-create_time')
            for testitem in testitems:
                params = {'material': material, 'testitem': testitem, 'sflog': obj}
                QuaStat.objects.get_or_create(
                    **params,
                    defaults={**params,
                              **{'create_by': request.user,
                                 'belong_dept': obj.mgroup.belong_dept}})
        qs = QuaStat.objects.filter(sflog=obj).order_by(
            'material__sort', 'material__create_time',
            'testitem__sort', 'testitem__create_time')
        sr = QuaStatSerializer(instance=qs, many=True)
        return Response(sr.data)


class SfLogExpViewSet(ListModelMixin, UpdateModelMixin, CustomGenericViewSet):
    """
    list: Exception shift handling

    Exception shift handling.
    """
    perms_map = {'get': '*', 'put': 'sflogexp.update'}
    queryset = SfLogExp.objects.all()
    serializer_class = SflogExpSerializer
    # NOTE(review): original listed 'stlog' twice; deduplicated.
    select_related_fields = ['sflog', 'sflog__mgroup', 'stlog',
                             'sflog__team', 'sflog__shift', 'sflog__leader']
    filterset_fields = ['sflog', 'stlog']


class WMaterialViewSet(ListModelMixin, CustomGenericViewSet):
    """
    list: Workshop inventory

    Workshop inventory.
    """
    perms_map = {'get': '*'}
    queryset = WMaterial.objects.filter(count__gt=0)
    serializer_class = WMaterialSerializer
    select_related_fields = ['material', 'belong_dept', 'material__process', 'supplier']
    search_fields = ['material__name', 'material__number',
                     'material__specification', 'batch', 'material__model']
    filterset_class = WMaterialFilter
    ordering_fields = ["update_time", "state", "count", "count_xtest"]

    def filter_queryset(self, queryset):
        """Hide scrapped stock unless ``state_all`` is explicitly requested."""
        queryset = super().filter_queryset(queryset)
        if self.request.query_params.get('state_all'):
            return queryset
        return queryset.exclude(state=WMaterial.WM_SCRAP)

    @action(methods=['post'], detail=False, perms_map={'post': '*'},
            serializer_class=DeptBatchSerializer)
    @transaction.atomic
    def batchs(self, request):
        """Get the batch numbers of a workshop (deprecated)

        Get the batch numbers of a workshop.
        """
        sr = DeptBatchSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        batchs = WMaterial.objects.filter(
            belong_dept__name=vdata['belong_dept_name'],
            count__gt=0).values_list('batch', flat=True).distinct()
        return Response(list(batchs))


class MlogViewSet(CustomModelViewSet):
    """
    list: Production logs

    Production logs.
    """
    queryset = Mlog.objects.all()
    serializer_class = MlogSerializer
    # NOTE(review): original listed 'test_user' twice; deduplicated.
    select_related_fields = ['create_by', 'update_by', 'mtask', 'mtaskb',
                             'handle_user', 'handle_user_2', 'equipment',
                             'mgroup__belong_dept', 'equipment_2', 'material_in',
                             'material_out', 'route__routepack', 'supplier',
                             'ticket', 'mgroup__process', 'test_user',
                             'handle_leader']
    prefetch_related_fields = ['handle_users', 'material_outs', 'b_mlog', 'equipments']
    filterset_class = MlogFilter
    search_fields = ['material_in__name', 'material_in__number',
                     'material_in__specification', 'batch', 'material_in__model',
                     'material_out__name', 'material_out__number',
                     'material_out__specification', 'material_out__model']

    @transaction.atomic
    def perform_destroy(self, instance):
        """Delete a log plus its ticket and dependent Ftest rows; submitted or
        in-approval logs are protected."""
        if instance.submit_time is not None:
            raise ParseError('日志已提交不可变动')
        if instance.ticket and instance.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志存在审批!')
        if instance.ticket:
            instance.ticket.delete()
        # Capture dependent Ftest ids before the cascade removes the link rows.
        ftest_ids = list(Ftest.objects.filter(
            mlogbw_ftest__mlogb__mlog=instance).values_list('id', flat=True))
        instance.delete()
        Ftest.objects.filter(id__in=ftest_ids).delete()

    @transaction.atomic
    def perform_update(self, serializer):
        """Reject edits on submitted logs or logs with an active approval ticket."""
        ins = serializer.instance
        if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志在审批中不可修改!')
        if ins.submit_time is not None:
            raise ParseError('该日志已提交!')
        serializer.save()

    @action(methods=['post'], detail=False, perms_map={'post': 'mlog.init'},
            serializer_class=MlogInitSerializer)
    def init(self, request, *args, **kwargs):
        """Initialize a log

        Initialize a log.
        """
        sr = self.get_serializer(data=request.data)
        sr.is_valid(raise_exception=True)
        ins = sr.save()
        return Response(MlogSerializer(ins).data)

    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.update'},
            serializer_class=MlogChangeSerializer)
    def change(self, request, *args, **kwargs):
        """Modify a log

        Modify a log.
        """
        ins = self.get_object()
        if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志在审批中不可修改!')
        sr = MlogChangeSerializer(instance=ins, data=request.data)
        sr.is_valid(raise_exception=True)
        sr.save()
        return Response(MlogSerializer(ins).data)

    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'},
            serializer_class=Serializer)
    def submit(self, request, *args, **kwargs):
        """Submit a log (moves workshop inventory)

        Submit a log.
        """
        ins: Mlog = self.get_object()
        now = timezone.now()
        if ins.ticket:
            raise ParseError('该日志存在审批!')
        # No ticket attached: still blocked if the process demands approval.
        p: Process = ins.mgroup.process
        if p.mlog_need_ticket:
            raise ParseError('该日志需要审批!')
        mlog_submit_validate(ins)
        with transaction.atomic():
            mlog_submit(ins, self.request.user, now)
        return Response(MlogSerializer(ins).data)

    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'},
            serializer_class=MlogRevertSerializer)
    def revert(self, request, *args, **kwargs):
        """Withdraw a log submission

        Withdraw a log submission.
        """
        ins: Mlog = self.get_object()
        if ins.ticket:
            raise ParseError('该日志存在审批!')
        user = request.user
        if ins.submit_time is None:
            raise ParseError('日志未提交不可撤销')
        if user != ins.submit_user:
            raise ParseError('非提交人不可撤销!')
        now = timezone.now()
        with transaction.atomic():
            mlog_revert(ins, user, now)
        return Response(MlogSerializer(instance=ins).data)

    @action(methods=['post'], detail=False, perms_map={'post': '*'},
            serializer_class=MlogRelatedSerializer)
    @transaction.atomic
    def related_first(self, request, *args, **kwargs):
        """Get the first-process log of related tasks

        Returns one log per distinct batch, ordered by process sort / batch /
        creation time.
        """
        sr = MlogRelatedSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        mtask = vdata['mtask']
        # Original code left `mtasks` unbound when mtask.utask was falsy
        # (UnboundLocalError); fall back to the task itself in that case.
        mtasks = mtask.related if mtask.utask else [mtask]
        mlogs = Mlog.objects.filter(mtask__in=mtasks).order_by(
            'mtask__mgroup__process__sort', 'batch', 'create_time')
        data = MlogSerializer(instance=mlogs, many=True).data
        # Keep only the first log of each consecutive batch group.
        res_data = []
        for ind, val in enumerate(data):
            if ind == 0 or val['batch'] != data[ind - 1]['batch']:
                res_data.append(val)
        return Response(res_data)

    @action(methods=['post'], detail=False, perms_map={'post': '*'},
            serializer_class=MlogAnaSerializer)
    def ana(self, request):
        """Core statistics

        Aggregates counts over submitted logs, optionally filtered by
        department, material category and date range.
        """
        sr = MlogAnaSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        mlogs = Mlog.objects.exclude(submit_time=None)
        if vdata.get('belong_dept_name', ''):
            mlogs = mlogs.filter(mgroup__belong_dept__name=vdata['belong_dept_name'])
        if vdata.get('material_cate', ''):
            mlogs = mlogs.filter(material_out__cate=vdata['material_cate'])
        if vdata.get('start_date', ''):
            mlogs = mlogs.filter(handle_date__gte=vdata['start_date'])
        if vdata.get('end_date', ''):
            mlogs = mlogs.filter(handle_date__lte=vdata['end_date'])
        res = mlogs.aggregate(
            count_real=Sum('count_real'), count_ok=Sum('count_ok'),
            count_notok=Sum('count_notok'), count_n_zw=Sum('count_n_zw'),
            count_n_tw=Sum('count_n_tw'), count_n_qp=Sum('count_n_qp'),
            count_n_wq=Sum('count_n_wq'), count_n_dl=Sum('count_n_dl'),
            count_n_pb=Sum('count_n_pb'), count_n_dxt=Sum('count_n_dxt'),
            count_n_jsqx=Sum('count_n_jsqx'), count_n_qt=Sum('count_n_qt'))
        # Sum() yields None over an empty queryset; normalize to 0.
        for key in res:
            if res[key] is None:
                res[key] = 0
        return Response(res)


class HandoverViewSet(CustomModelViewSet):
    """
    list: Handover records

    Handover records.
    """
    queryset = Handover.objects.all()
    serializer_class = HandoverSerializer
    update_serializer_class = HandoverUpdateSerializer
    select_related_fields = ['send_user', 'send_mgroup', 'send_dept',
                             'recive_user', 'recive_mgroup', 'recive_dept', 'wm']
    filterset_class = HandoverFilter
    search_fields = ['id', 'material__name', 'material__number',
                     'material__specification', 'batch', 'material__model']

    def perform_destroy(self, instance):
        """Only sender, receiver or creator may delete an unsubmitted record."""
        user = self.request.user
        if instance.submit_time is not None:
            raise ParseError('日志已提交不可变动')
        if (instance.send_user != user and instance.recive_user != user
                and instance.create_by != user):
            raise ParseError('非交送人和接收人不可删除该记录')
        return super().perform_destroy(instance)

    @action(methods=['post'], detail=True, perms_map={'post': 'handover.submit'},
            serializer_class=Serializer)
    @transaction.atomic
    def submit(self, request, *args, **kwargs):
        """Submit a handover record (moves workshop inventory)

        Submit a handover record.
        """
        ins: Handover = self.get_object()
        user: User = self.request.user
        if ins.type != Handover.H_SCRAP:
            # Only the receiver (or someone in the receiver's department) may submit.
            if user != ins.recive_user and user.belong_dept != ins.recive_user.belong_dept:
                raise ParseError('非接收人不可提交')
        if ins.submit_time is None:
            handover_submit(ins, user, None)
        return Response()

    @action(methods=['post'], detail=True, perms_map={'post': '*'},
            serializer_class=HandoverMgroupSerializer)
    @transaction.atomic
    def mgroups(self, request, *args, **kwargs):
        """Get the work sections a handover may target

        Get the work sections a handover may target.
        """
        sr = HandoverMgroupSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        material_in_id = vdata['material_in']
        h_type = vdata['type']  # renamed from `type` to avoid shadowing the builtin
        m_qs = Mgroup.objects.all()
        if h_type in [Handover.H_NORMAL, Handover.H_CHANGE]:
            m_qs = m_qs.filter(process__route_p_routepack__state=RoutePack.RP_S_CONFIRM)
            m_qs = m_qs.filter(
                process__route_p__material_in__id=material_in_id) | m_qs.filter(
                process__route_p__routemat_route__material__id=material_in_id)
        elif h_type in [Handover.H_SCRAP]:
            m_qs = m_qs.filter(process=None)
        return Response(list(m_qs.values('id', 'name')))

    @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'},
            serializer_class=GenHandoverWmSerializer)
    @transaction.atomic
    def gen_by_wm(self, request):
        """Generate a handover record from workshop inventory (deprecated)

        Generate a handover record from workshop inventory.
        """
        sr = GenHandoverWmSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        user = request.user
        send_date, send_mgroup, send_user, recive_dept, recive_user, wm, count = (
            vdata['send_date'], vdata['send_mgroup'], vdata['send_user'],
            vdata['recive_dept'], vdata['recive_user'], vdata['wm'], vdata['count'])
        if send_mgroup.belong_dept != wm.belong_dept:
            raise ParseError('送料工段错误!')
        handover = Handover.objects.create(
            send_date=send_date,
            send_user=send_user,
            recive_dept=recive_dept,
            recive_user=recive_user,
            send_mgroup=send_mgroup,
            send_dept=wm.belong_dept,
            batch=wm.batch,
            material=wm.material,
            count=count,
            wm=wm,
            create_by=user
        )
        return Response({'handover': handover.id})

    @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'},
            serializer_class=GenHandoverSerializer)
    @transaction.atomic
    def gen_by_mlog(self, request):
        """Generate handover records from production logs (deprecated)

        Generate handover records from production logs.
        """
        sr = GenHandoverSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        user = request.user
        send_date, send_user, recive_dept, recive_user = (
            vdata['send_date'], vdata['send_user'],
            vdata['recive_dept'], vdata['recive_user'])
        for mlog in vdata['mlogs']:
            Handover.objects.create(
                send_date=send_date,
                send_user=send_user,
                recive_dept=recive_dept,
                recive_user=recive_user,
                send_dept=mlog.mgroup.belong_dept,
                batch=mlog.batch,
                material=mlog.material_out,
                count=mlog.count_real,
                count_eweight=mlog.count_real_eweight,
                mlog=mlog,
                send_mgroup=mlog.mgroup,
                create_by=user
            )
        return Response()


class AttlogViewSet(CustomModelViewSet):
    """
    list: Workshop attendance

    Workshop attendance.
    """
    queryset = AttLog.objects.all()
    serializer_class = AttLogSerializer
    select_related_fields = ['user', 'post', 'sflog']
    filterset_fields = ['sflog__mgroup', 'sflog__mgroup__belong_dept__name',
                        'sflog__work_date', 'sflog__mgroup__cate',
                        'sflog__mgroup__need_enm']
    ordering = ['-sflog__work_date', 'create_time']


class OtherLogViewSet(CustomModelViewSet):
    """
    list: Other production records

    Other production records.
    """
    queryset = OtherLog.objects.all()
    serializer_class = OtherLogSerializer
    filterset_fields = {
        "product": ["exact"],
        "handle_date": ["exact", "gte", "lte"]
    }
    search_fields = ['product']
    ordering = ['-handle_date', '-create_time']


class MlogbViewSet(CustomListModelMixin, CustomGenericViewSet):
    """Read-only listing of production-log detail rows (Mlogb)."""
    perms_map = {'get': '*'}
    queryset = Mlogb.objects.all()
    serializer_class = MlogbDetailSerializer
    filterset_class = MlogbFilter
    ordering = ["create_time"]


class MlogbInViewSet(CreateModelMixin, UpdateModelMixin, DestroyModelMixin, CustomGenericViewSet):
    """Manage input (consumption) detail rows of a production log."""
    perms_map = {'post': 'mlog.update', 'delete': 'mlog.update', 'put': 'mlog.update'}
    queryset = Mlogb.objects.filter(material_in__isnull=False)
    serializer_class = MlogbInSerializer
    update_serializer_class = MlogbInUpdateSerializer

    @transaction.atomic
    def perform_destroy(self, instance):
        """Delete an input row together with the output rows it generated and
        their dependent Ftest records."""
        ins: Mlogb = instance
        if ins.mlog.submit_time is not None:
            raise ParseError('生产日志已提交不可编辑')
        qs = None
        if ins.mlogb_to:
            # Directly linked output row.
            qs = Mlogb.objects.filter(id=ins.mlogb_to.id)
            ftest_ids = list(Ftest.objects.filter(
                mlogbw_ftest__mlogb__in=qs).values_list('id', flat=True))
            qs.delete()
            if ftest_ids:
                Ftest.objects.filter(id__in=ftest_ids).delete()
        elif ins.parent is None and ins.mtask:
            # Main input row without explicit link: remove matching output rows.
            query_dict = {"material_out__isnull": False,
                          "mlog": ins.mlog, "mtask": ins.mtask}
            route = ins.mtask.route
            if route.batch_bind:
                query_dict["batch__contains"] = ins.batch
            qs = Mlogb.objects.filter(**query_dict)
            # Dependent Ftest records must be removed as well.
            if qs:
                ftest_ids = list(Ftest.objects.filter(
                    mlogbw_ftest__mlogb__in=qs).values_list('id', flat=True))
                qs.delete()
                if ftest_ids:
                    Ftest.objects.filter(id__in=ftest_ids).delete()
        ins.delete()

    @transaction.atomic
    def perform_create(self, serializer):
        """Create an input row and, for the route's main input material on the
        main batch, auto-create the matching output row (and per-piece Mlogbw
        rows when materials are tracked per piece)."""
        mlogbin: Mlogb = serializer.save()
        mlog: Mlog = mlogbin.mlog
        route: Route = mlog.route
        if not (mlogbin.mtask and mlogbin.material_in):
            return
        material_in: Material = mlogbin.material_in
        material_out: Material = mlog.material_out
        if material_out is None:
            raise ParseError('产物不可为空')
        # Only the route's main input material on a main (non-child) batch
        # generates an output row.
        if route.material_in != material_in or mlogbin.parent is not None:
            return
        m_dict = {
            "mtask": mlogbin.mtask,
            "mlog": mlog,
            "material_out": material_out,
        }
        m_dict['batch'] = generate_new_batch(mlogbin.batch, mlog)
        wm_in: WMaterial = mlogbin.wm_in
        mlogbout, is_create = Mlogb.objects.get_or_create(
            **m_dict,
            defaults={"batch_ofrom": wm_in.batch_ofrom,
                      "material_ofrom": wm_in.material_ofrom})
        if is_create and route:
            d_count_real = 0
            d_count_ok = 0
            if route.process.mtype == Process.PRO_NORMAL:
                d_count_real = mlogbin.count_use
                d_count_ok = mlogbin.count_use
            elif route.process.mtype == Process.PRO_DIV:
                xcount = mlogbin.count_use * route.div_number
                d_count_real = xcount
                d_count_ok = xcount
            elif route.process.mtype == Process.PRO_MERGE:
                xcount = math.floor(mlogbin.count_use / route.div_number)
                d_count_real = xcount
                d_count_ok = xcount
            # Look up the quality-control template for per-piece tracked output.
            if material_out.tracking == Material.MA_TRACKING_SINGLE:
                qctmat = QctMat.objects.filter(
                    material=material_out,
                    qct__is_deleted=False).order_by("-create_time").first()
                mlogbout.qct = qctmat.qct if qctmat else None
            mlogbout.count_real = d_count_real
            mlogbout.count_ok = d_count_ok
            mlogbout.save()
        mlogbin.mlogb_to = mlogbout
        mlogbin.save()
        if material_in.tracking == Material.MA_TRACKING_SINGLE:
            m_out_t = material_out.tracking
            if mlogbin.count_use == wm_in.count:
                # Auto-create per-piece rows (Mlogbw) for the whole stock.
                for wpr in Wpr.objects.filter(wm=wm_in).order_by("number"):
                    Mlogbw.objects.get_or_create(
                        wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number})
                    if m_out_t == Material.MA_TRACKING_SINGLE:
                        if route.process.mtype == Process.PRO_NORMAL:
                            Mlogbw.objects.get_or_create(
                                wpr=wpr, mlogb=mlogbout,
                                defaults={"number": wpr.number})
                        elif route.process.mtype == Process.PRO_DIV:
                            for i in range(route.div_number):
                                Mlogbw.objects.get_or_create(
                                    mlogb=mlogbout, number=f'{wpr.number}-{i+1}')


class MlogbOutViewSet(UpdateModelMixin, CustomGenericViewSet):
    """Update output (production) detail rows of a production log."""
    perms_map = {"put": "mlog.update"}
    queryset = Mlogb.objects.filter(material_out__isnull=False)
    serializer_class = MlogbOutUpdateSerializer


class FmlogViewSet(CustomModelViewSet):
    """First-level production logs (Fmlog)."""
    perms_map = {'get': '*', 'post': 'mlog.create',
                 'put': 'mlog.update', 'delete': 'mlog.delete'}
    queryset = Fmlog.objects.all()
    serializer_class = FmlogSerializer
    update_serializer_class = FmlogUpdateSerializer
    filterset_fields = ['mtask', 'mgroup', 'mtask__route']
    select_related_fields = ['mtask', 'mgroup', 'mtask__route', 'mtask__route__routepack']

    def destroy(self, request, *args, **kwargs):
        """Refuse deletion while second-level logs still reference this record."""
        ins = self.get_object()
        if Mlog.objects.filter(fmlog=ins).exists():
            raise ParseError('因存在二级日志不可删除')
        return super().destroy(request, *args, **kwargs)


class BatchStViewSet(ListModelMixin, CustomGenericViewSet):
    """
    list: Batch statistics

    Batch statistics.
    """
    perms_map = {"get": "*"}
    queryset = BatchSt.objects.all()
    serializer_class = BatchStSerializer
    ordering_fields = ["batch", "last_time", "update_time"]
    ordering = ["batch"]
    filterset_class = BatchStFilter


class MlogbwViewSet(CustomModelViewSet):
    """Per-piece detail rows (Mlogbw) of a production-log detail (Mlogb)."""
    perms_map = {"get": "*", "post": "mlog.update",
                 "put": "mlog.update", "delete": "mlog.update"}
    queryset = Mlogbw.objects.all()
    serializer_class = MlogbwCreateUpdateSerializer
    filterset_fields = ['mlogb']
    ordering = ["number"]
    ordering_fields = ["number", "create_time"]

    def filter_queryset(self, queryset):
        """List requests must be scoped to one Mlogb."""
        if not self.detail and not self.request.query_params.get('mlogb', None):
            raise ParseError('请指定所属消耗/产出明细')
        return super().filter_queryset(queryset)

    def cal_mlogb_count(self, mlogb):
        """Recompute the parent Mlogb's counters from its Mlogbw rows.

        Inspection (ftest) status is only consulted for output rows.
        """
        count = Mlogbw.objects.filter(mlogb=mlogb).count()
        if mlogb.material_in:
            mlogb.count_use = count
        elif mlogb.material_out:
            mlogb.count_real = count
            mlogb.count_notok = Mlogbw.objects.filter(
                mlogb=mlogb, ftest__is_ok=False).count()
            mlogb.count_ok = count - mlogb.count_notok
        mlogb.save()

    @transaction.atomic
    def perform_create(self, serializer):
        """Create a per-piece row; for input rows with per-piece tracked output,
        mirror the piece into the linked output Mlogb."""
        ins: Mlogbw = serializer.save()
        route: Route = ins.mlogb.mlog.route
        mlogb: Mlogb = ins.mlogb
        self.cal_mlogb_count(mlogb)
        material_in: Material = mlogb.material_in
        if material_in is not None:
            mlogb_to = mlogb.mlogb_to
            material_out: Material = mlogb.mlog.material_out
            if mlogb_to and material_out.tracking == Material.MA_TRACKING_SINGLE:
                if route.process.mtype == Process.PRO_NORMAL:
                    Mlogbw.objects.get_or_create(
                        mlogb=mlogb_to, wpr=ins.wpr,
                        defaults={"number": ins.number})
                elif route.process.mtype == Process.PRO_DIV:
                    for i in range(route.div_number):
                        Mlogbw.objects.get_or_create(
                            mlogb=mlogb_to, number=f'{ins.number}-{i+1}')
                self.cal_mlogb_count(mlogb_to)

    @transaction.atomic
    def perform_update(self, serializer):
        """Save and recount the parent Mlogb."""
        mlogbw = serializer.save()
        self.cal_mlogb_count(mlogbw.mlogb)

    @transaction.atomic
    def perform_destroy(self, instance: Mlogbw):
        """Delete a per-piece row (and its ftest); for input rows with
        per-piece tracked output, delete the mirrored output pieces too."""
        mlogb = instance.mlogb
        ftest = instance.ftest
        instance.delete()
        if ftest:
            ftest.delete()
        self.cal_mlogb_count(mlogb)
        material_in: Material = mlogb.material_in
        if material_in is not None:
            mlogb_to = mlogb.mlogb_to
            material_out: Material = mlogb.mlog.material_out
            if mlogb_to and material_out.tracking == Material.MA_TRACKING_SINGLE:
                # number__contains matches split pieces like 'N-1'; NOTE(review):
                # it could also match unrelated numbers sharing a substring.
                mbws = Mlogbw.objects.filter(
                    Q(wpr=instance.wpr) | Q(number__contains=instance.number),
                    mlogb=mlogb_to)
                Ftest.objects.filter(
                    id__in=mbws.values_list('ftest__id', flat=True)).delete()
                mbws.delete()
                self.cal_mlogb_count(mlogb_to)