import math

from django.db import transaction
from rest_framework.decorators import action
from rest_framework.exceptions import ParseError
from rest_framework.mixins import DestroyModelMixin, ListModelMixin, UpdateModelMixin, CreateModelMixin
from rest_framework.response import Response
from rest_framework.serializers import Serializer
from django.db.models import Count, Q, Sum
from django.utils import timezone
from apps.system.models import User

from apps.mtm.models import Material, Process, Route, Mgroup, RoutePack
from apps.utils.viewsets import CustomGenericViewSet, CustomModelViewSet
from apps.utils.mixins import CustomListModelMixin, BulkCreateModelMixin, BulkDestroyModelMixin

from .filters import StLogFilter, SfLogFilter, WMaterialFilter, MlogFilter, HandoverFilter, MlogbFilter, BatchStFilter
from .models import (SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb,
                     Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect)
from .serializers import (SflogExpSerializer, SfLogSerializer, StLogSerializer, WMaterialSerializer,
                          MlogRevertSerializer,
                          MlogSerializer, MlogRelatedSerializer, DeptBatchSerializer, HandoverSerializer,
                          HandoverUpdateSerializer,
                          GenHandoverSerializer, GenHandoverWmSerializer, MlogAnaSerializer,
                          AttLogSerializer, OtherLogSerializer, MlogInitSerializer, MlogChangeSerializer,
                          MlogbDetailSerializer, MlogbInSerializer, MlogbInUpdateSerializer,
                          MlogbOutUpdateSerializer, FmlogSerializer, FmlogUpdateSerializer, BatchStSerializer,
                          MlogbwCreateUpdateSerializer, HandoverMgroupSerializer, MlogListSerializer, MlogbSerializer)
from .services import mlog_submit, handover_submit, mlog_revert
from apps.wpm.services import mlog_submit_validate, generate_new_batch
from apps.wf.models import State
from apps.wpmw.models import Wpr
from apps.qm.models import Qct, Ftest, TestItem
from apps.enm.models import EnStat
from apps.utils.tools import convert_ordereddict, update_dict

# Create your views here.

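# Note: CustomModelViewSet / CustomGenericViewSet and CustomListModelMixin are project-specific
# base classes from apps.utils. Judging from how they are used below, they appear to provide the
# perms_map permission mapping, the select_related_fields / prefetch_related_fields /
# *_serializer_class attributes, and the add_info_for_item / add_info_for_list hooks that
# post-process serialized data. This is an inference from usage in this module, not from their source.
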
class StLogViewSet(CustomModelViewSet):
    """
    list: Abnormality records

    Abnormality records
    """
    queryset = StLog.objects.all()
    serializer_class = StLogSerializer
    select_related_fields = ['mgroup']
    filterset_class = StLogFilter
    ordering = ['-start_time']

    def destroy(self, request, *args, **kwargs):
        obj: StLog = self.get_object()
        # Shutdown records longer than one minute must be kept
        if obj.is_shutdown and obj.duration_sec > 60:
            raise ParseError('停机记录不可删除')
        return super().destroy(request, *args, **kwargs)

class SfLogViewSet(UpdateModelMixin, CustomListModelMixin, DestroyModelMixin, CustomGenericViewSet):
    """
    list: Shift duty records

    Shift duty records
    """
    perms_map = {'get': '*', 'put': 'sflog.update', 'delete': 'sflog.delete'}
    queryset = SfLog.objects.all()
    serializer_class = SfLogSerializer
    select_related_fields = ['mgroup', 'shift', 'team', 'leader']
    filterset_class = SfLogFilter
    search_fields = ['note']
    ordering = ['-start_time']

    def add_info_for_list(self, data):
        # Attach energy/production statistics (EnStat) to each shift record
        sflog_ids = [i['id'] for i in data]
        enstat_rows = EnStat.objects.filter(sflog_id__in=sflog_ids, type='sflog').values(
            'sflog_id', 'total_production', 'qua_data', 'production_hour',
            'elec_consume_unit', 'run_rate', 'production_cost_unit')
        # Index the statistics by sflog id
        enstat_map = {x['sflog_id']: x for x in enstat_rows}
        for item in data:
            if item['id'] in enstat_map:
                item.update({"enstat_info": enstat_map[item['id']]})
        return data

    @action(methods=['get'], detail=True, perms_map={'get': '*'})
    def init_test(self, request, pk=None):
        """
        Initialize quality-test entry

        Initialize quality-test entry
        """
        from apps.qm.models import QuaStat, TestItem
        from apps.qm.serializers import QuaStatSerializer
        obj = self.get_object()
        test_materials = Material.objects.filter(
            id__in=obj.mgroup.test_materials).order_by('sort', '-create_time')
        for material in test_materials:
            testitems = TestItem.objects.filter(
                id__in=material.testitems).order_by('sort', '-create_time')
            for testitem in testitems:
                params = {'material': material,
                          'testitem': testitem, 'sflog': obj}
                QuaStat.objects.get_or_create(
                    **params, defaults={**params, **{'create_by': request.user, 'belong_dept': obj.mgroup.belong_dept}})
        qs = QuaStat.objects.filter(sflog=obj).order_by(
            'material__sort', 'material__create_time', 'testitem__sort', 'testitem__create_time')
        sr = QuaStatSerializer(instance=qs, many=True)
        return Response(sr.data)

class SfLogExpViewSet(ListModelMixin, UpdateModelMixin, CustomGenericViewSet):
    """
    list: Shift abnormality handling

    Shift abnormality handling
    """
    perms_map = {'get': '*', 'put': 'sflogexp.update'}
    queryset = SfLogExp.objects.all()
    serializer_class = SflogExpSerializer
    select_related_fields = ['sflog', 'sflog__mgroup', 'stlog', 'sflog__team', 'sflog__shift', 'sflog__leader']
    filterset_fields = ['sflog', 'stlog']

class WMaterialViewSet(ListModelMixin, CustomGenericViewSet):
    """
    list: Workshop inventory

    Workshop inventory
    """
    perms_map = {'get': '*'}
    queryset = WMaterial.objects.filter(count__gt=0)
    serializer_class = WMaterialSerializer
    select_related_fields = ['material', 'belong_dept', 'material__process', 'supplier']
    search_fields = ['material__name',
                     'material__number', 'material__specification', 'batch', 'material__model', "defect__name", "notok_sign"]
    filterset_class = WMaterialFilter
    ordering_fields = ["update_time", "state", "count", "count_xtest"]

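    # Scrapped stock (WMaterial.WM_SCRAP) is hidden from the listing by default; passing the
    # state_all query parameter (any truthy value) includes it. A hypothetical request:
    #   GET .../wmaterial/?state_all=1   -> also returns scrapped inventory rows
    # (the exact URL prefix depends on the project's router registration, which is not shown here).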
    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        if self.request.query_params.get('state_all'):
            return queryset
        return queryset.exclude(state=WMaterial.WM_SCRAP)

    @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=DeptBatchSerializer)
    @transaction.atomic
    def batchs(self, request):
        """Get the batch numbers of a workshop (deprecated)

        Get the batch numbers of a workshop
        """
        sr = DeptBatchSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        batchs = WMaterial.objects.filter(
            belong_dept__name=vdata['belong_dept_name'], count__gt=0).values_list('batch', flat=True).distinct()
        return Response(list(batchs))

class MlogViewSet(CustomModelViewSet):
    """
    list: Production logs

    Production logs
    """
    queryset = Mlog.objects.all()
    serializer_class = MlogSerializer
    list_serializer_class = MlogListSerializer
    select_related_fields = ['create_by', 'update_by', 'mtask', 'mtaskb',
                             'handle_user', 'handle_user_2', 'equipment', 'mgroup__belong_dept',
                             'equipment_2', 'material_in', 'material_out', 'route__routepack',
                             'supplier', 'ticket', 'mgroup__process', 'test_user', 'handle_leader']
    prefetch_related_fields = ['handle_users',
                               'material_outs', 'b_mlog', 'equipments']
    filterset_class = MlogFilter
    search_fields = ['material_in__name',
                     'material_in__number', 'material_in__specification', 'batch', 'material_in__model',
                     'material_out__name', 'material_out__number', 'material_out__specification', 'material_out__model']

    def add_info_for_item(self, data):
        # Replace TestItem ids in oinfo_json with readable test-item names
        if data.get("oinfo_json", {}):
            czx_dict = dict(TestItem.objects.filter(id__in=data.get("oinfo_json", {}).keys()).values_list('id', 'name'))
            data["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in data.get("oinfo_json", {}).items()}
        return data

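    # add_info_for_list enriches the serialized page in two steps: it first swaps TestItem ids in
    # each row's oinfo_json for readable names (exposed as oinfo_json_), and, when the request
    # carries a with_mlogb query parameter, it additionally nests the related Mlogb detail rows
    # under each log as "mlogb_full" (all rows) and "mlogb" (only rows that have a material_out).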
    def add_info_for_list(self, data):
        czx_dict = {}
        for item in data:
            czx_dict.update(item.get("oinfo_json", {}))
        czx_dict = dict(TestItem.objects.filter(id__in=czx_dict.keys()).values_list('id', 'name'))
        for item in data:
            if item.get("oinfo_json", None):
                item["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in item.get("oinfo_json", {}).items()}
        if self.request.query_params.get('with_mlogb', False):
            data_dict = {item['id']: {**item, "mlogb_full": [], "mlogb": []} for item in data}
            mlogb_qs = Mlogb.objects.filter(mlog__id__in=data_dict.keys()).select_related("material_in", "material_out").order_by("create_time")
            mlogb_data = MlogbSerializer(instance=mlogb_qs, many=True).data
            for item in mlogb_data:
                item_dict = convert_ordereddict(item)
                if item_dict["mlog"] in data_dict:
                    data_dict[item_dict["mlog"]]["mlogb_full"].append(item_dict)
                    if item.get("material_out", None):
                        data_dict[item_dict["mlog"]]["mlogb"].append(item_dict)
            data = list(data_dict.values())
        return data

    @transaction.atomic
    def perform_destroy(self, instance):
        if instance.submit_time is not None:
            raise ParseError('日志已提交不可变动')
        if instance.ticket and instance.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志存在审批!')
        # delete_auditlog(instance, instance.id)
        if instance.ticket:
            instance.ticket.delete()
        # Remember the final-test records tied to this log's piece records, then remove them once the log is gone
        ftestIds = list(Ftest.objects.filter(mlogbw_ftest__mlogb__mlog=instance).values_list('id', flat=True))
        instance.delete()
        Ftest.objects.filter(id__in=ftestIds).delete()

    @transaction.atomic
    def perform_update(self, serializer):
        ins = serializer.instance
        if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志在审批中不可修改!')
        if ins.submit_time is not None:
            raise ParseError('该日志已提交!')
        # val_old = MlogSerializer(instance=ins).data
        serializer.save()
        # val_new = MlogSerializer(instance=ins).data
        # create_auditlog('update', ins, val_new, val_old)

    @action(methods=['post'], detail=False, perms_map={'post': 'mlog.init'}, serializer_class=MlogInitSerializer)
    def init(self, request, *args, **kwargs):
        """Initialize a log

        Initialize a log
        """
        sr = self.get_serializer(data=request.data)
        sr.is_valid(raise_exception=True)
        ins = sr.save()
        return Response(MlogSerializer(ins).data)

    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.update'}, serializer_class=MlogChangeSerializer)
    def change(self, request, *args, **kwargs):
        """Modify a log

        Modify a log
        """
        ins = self.get_object()
        if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
            raise ParseError('该日志在审批中不可修改!')
        sr = MlogChangeSerializer(instance=ins, data=request.data)
        sr.is_valid(raise_exception=True)
        sr.save()
        return Response(MlogSerializer(ins).data)

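    # Submission flow (as implemented below): a log that has a workflow ticket, or whose process is
    # flagged mlog_need_ticket, cannot be submitted directly here; otherwise the log is checked by
    # mlog_submit_validate and then posted atomically via mlog_submit, which is what actually moves
    # the workshop inventory. A client would call this as a detail action on the viewset, e.g.
    # POST .../mlogs/<id>/submit/ with an empty body (the exact route prefix depends on the
    # project's URL configuration and is assumed here).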
    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'}, serializer_class=Serializer)
    def submit(self, request, *args, **kwargs):
        """Submit a log (updates workshop inventory)

        Submit a log
        """
        ins: Mlog = self.get_object()
        now = timezone.now()
        if ins.ticket:
            raise ParseError('该日志存在审批!')
        else:
            p: Process = ins.mgroup.process
            if p.mlog_need_ticket:
                raise ParseError('该日志需要审批!')
        mlog_submit_validate(ins)
        with transaction.atomic():
            mlog_submit(ins, self.request.user, now)
            vdata_new = MlogSerializer(ins).data
            # create_auditlog('submit', ins, vdata_new,
            #                 vdata_old, now, self.request.user)
        return Response(vdata_new)

    @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'}, serializer_class=MlogRevertSerializer)
    def revert(self, request, *args, **kwargs):
        """Revert a submitted log

        Revert a submitted log
        """
        ins: Mlog = self.get_object()
        if ins.ticket:
            raise ParseError('该日志存在审批!')
        user = request.user
        if ins.submit_time is None:
            raise ParseError('日志未提交不可撤销')
        if user != ins.submit_user:
            raise ParseError('非提交人不可撤销!')
        now = timezone.now()
        with transaction.atomic():
            mlog_revert(ins, user, now)
            # create_auditlog('revert', ins, {}, {}, now, user,
            #             request.data.get('change_reason', ''))
        return Response(MlogSerializer(instance=ins).data)

    @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=MlogRelatedSerializer)
    @transaction.atomic
    def related_first(self, request, *args, **kwargs):
        """Get the first-process logs of related tasks

        Get the first-process logs of related tasks
        """
        sr = MlogRelatedSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        mtask = vdata['mtask']
        # Default to an empty queryset so mlogs is always defined, even without a parent utask
        mlogs = Mlog.objects.none()
        if mtask.utask:
            mtasks = mtask.related
            mlogs = Mlog.objects.filter(mtask__in=mtasks).order_by(
                'mtask__mgroup__process__sort', 'batch', 'create_time')
        data = MlogSerializer(instance=mlogs, many=True).data
        # Collapse consecutive rows with the same batch, keeping only the first of each run
        res_data = []
        for ind, val in enumerate(data):
            if ind == 0:
                res_data.append(val)
            else:
                before = data[ind - 1]
                if val['batch'] != before['batch']:
                    res_data.append(val)
        return Response(res_data)

    @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=MlogAnaSerializer)
    def ana(self, request):
        """Core statistics

        Core statistics
        """
        sr = MlogAnaSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        mlogs = Mlog.objects.exclude(submit_time=None)
        if vdata.get('belong_dept_name', ''):
            mlogs = mlogs.filter(
                mgroup__belong_dept__name=vdata['belong_dept_name'])
        if vdata.get('material_cate', ''):
            mlogs = mlogs.filter(
                material_out__cate=vdata['material_cate'])
        if vdata.get('start_date', ''):
            mlogs = mlogs.filter(handle_date__gte=vdata['start_date'])
        if vdata.get('end_date', ''):
            mlogs = mlogs.filter(handle_date__lte=vdata['end_date'])
        res = mlogs.aggregate(
            count_real=Sum('count_real'),
            count_ok=Sum('count_ok'),
            count_notok=Sum('count_notok'),
            count_n_zw=Sum('count_n_zw'),
            count_n_tw=Sum('count_n_tw'),
            count_n_qp=Sum('count_n_qp'),
            count_n_wq=Sum('count_n_wq'),
            count_n_dl=Sum('count_n_dl'),
            count_n_pb=Sum('count_n_pb'),
            count_n_dxt=Sum('count_n_dxt'),
            count_n_jsqx=Sum('count_n_jsqx'),
            count_n_qt=Sum('count_n_qt'))
        # Replace None aggregates with 0 so the client always receives numbers
        for key in res:
            if res[key] is None:
                res[key] = 0
        return Response(res)

class HandoverViewSet(CustomModelViewSet):
    """
    list: Handover records

    Handover records
    """
    queryset = Handover.objects.all()
    serializer_class = HandoverSerializer
    select_related_fields = ['send_user', 'send_mgroup', 'send_dept', 'recive_user', 'recive_mgroup', 'recive_dept', 'wm']
    filterset_class = HandoverFilter
    search_fields = ['id', 'material__name',
                     'material__number', 'material__specification', 'batch', 'material__model']

    def perform_destroy(self, instance):
        user = self.request.user
        if instance.submit_time is not None:
            raise ParseError('日志已提交不可变动')
        if instance.send_user != user and instance.recive_user != user and instance.create_by != user:
            raise ParseError('非交送人和接收人不可删除该记录')
        return super().perform_destroy(instance)

    @action(methods=['post'], detail=True, perms_map={'post': 'handover.submit'}, serializer_class=Serializer)
    @transaction.atomic
    def submit(self, request, *args, **kwargs):
        """Submit a handover record (updates workshop inventory)

        Submit a handover record
        """
        ins: Handover = self.get_object()
        user: User = self.request.user
        if ins.type != Handover.H_SCRAP:
            # Only the receiver (or someone from the receiver's department) may submit a non-scrap handover
            if ins.recive_user is None or user == ins.recive_user or user.belong_dept == ins.recive_user.belong_dept:
                pass
            else:
                raise ParseError('非接收人不可提交')
        if ins.submit_time is None:
            handover_submit(ins, user, None)
        return Response()

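    # Candidate work sections for a normal or change handover are derived from the routing data:
    # an Mgroup qualifies when its process belongs to a confirmed route pack (RP_S_CONFIRM) that
    # consumes the given material either as the route's main input or as an auxiliary route material.
    # Scrap handovers instead go to sections that have no process attached.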
    @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=HandoverMgroupSerializer)
    @transaction.atomic
    def mgroups(self, request, *args, **kwargs):
        """Get the work sections a material can be handed over to

        Get the work sections a material can be handed over to
        """
        sr = HandoverMgroupSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        materialInId = vdata['material']
        handover_type = vdata['type']
        m_qs = Mgroup.objects.all()
        if handover_type in [Handover.H_NORMAL, Handover.H_CHANGE]:
            m_qs = m_qs.filter(process__route_p__routepack__state=RoutePack.RP_S_CONFIRM)
            m_qs = m_qs.filter(
                process__route_p__material_in__id=materialInId) | m_qs.filter(
                    process__route_p__routemat_route__material__id=materialInId
                )
        elif handover_type in [Handover.H_SCRAP]:
            m_qs = m_qs.filter(process=None)
        return Response(list(m_qs.values('id', 'name').distinct()))

    @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'}, serializer_class=GenHandoverWmSerializer)
    @transaction.atomic
    def gen_by_wm(self, request):
        """Generate a handover record from workshop inventory (deprecated)

        Generate a handover record from workshop inventory
        """
        sr = GenHandoverWmSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        user = request.user
        send_date, send_mgroup, send_user, recive_dept, recive_user, wm, count = vdata['send_date'], vdata['send_mgroup'], vdata[
            'send_user'], vdata['recive_dept'], vdata['recive_user'], vdata['wm'], vdata['count']
        if send_mgroup.belong_dept != wm.belong_dept:
            raise ParseError('送料工段错误!')
        handover = Handover.objects.create(
            send_date=send_date,
            send_user=send_user,
            recive_dept=recive_dept,
            recive_user=recive_user,
            send_mgroup=send_mgroup,
            send_dept=wm.belong_dept,
            batch=wm.batch,
            material=wm.material,
            count=count,
            wm=wm,
            create_by=user
        )
        return Response({'handover': handover.id})

    @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'}, serializer_class=GenHandoverSerializer)
    @transaction.atomic
    def gen_by_mlog(self, request):
        """Generate handover records from production logs (deprecated)

        Generate handover records from production logs
        """
        sr = GenHandoverSerializer(data=request.data)
        sr.is_valid(raise_exception=True)
        vdata = sr.validated_data
        user = request.user
        send_date, send_user, recive_dept, recive_user = vdata['send_date'], vdata[
            'send_user'], vdata['recive_dept'], vdata['recive_user']
        for mlog in vdata['mlogs']:
            Handover.objects.create(
                send_date=send_date,
                send_user=send_user,
                recive_dept=recive_dept,
                recive_user=recive_user,
                send_dept=mlog.mgroup.belong_dept,
                batch=mlog.batch,
                material=mlog.material_out,
                count=mlog.count_real,
                count_eweight=mlog.count_real_eweight,
                mlog=mlog,
                send_mgroup=mlog.mgroup,
                create_by=user
            )
        return Response()

class AttlogViewSet(CustomModelViewSet):
    """
    list: Workshop attendance

    Workshop attendance
    """
    queryset = AttLog.objects.all()
    serializer_class = AttLogSerializer
    select_related_fields = ['user', 'post', 'sflog']
    filterset_fields = ['sflog__mgroup',
                        'sflog__mgroup__belong_dept__name', 'sflog__work_date', 'sflog__mgroup__cate', 'sflog__mgroup__need_enm']
    ordering = ['-sflog__work_date', 'create_time']

class OtherLogViewSet(CustomModelViewSet):
    """
    list: Other production records

    Other production records
    """
    queryset = OtherLog.objects.all()
    serializer_class = OtherLogSerializer
    filterset_fields = {
        "product": ["exact"],
        "handle_date": ["exact", "gte", "lte"]
    }
    search_fields = ['product']
    ordering = ['-handle_date', '-create_time']

class MlogbViewSet(CustomListModelMixin, CustomGenericViewSet):
    perms_map = {'get': '*'}
    queryset = Mlogb.objects.all()
    serializer_class = MlogbDetailSerializer
    select_related_fields = ["material_out", "material_in"]
    filterset_class = MlogbFilter
    ordering = ["create_time"]

class MlogbInViewSet(CreateModelMixin, UpdateModelMixin, DestroyModelMixin, CustomGenericViewSet):
    perms_map = {'post': 'mlog.update', 'delete': 'mlog.update', 'put': 'mlog.update'}
    queryset = Mlogb.objects.filter(material_in__isnull=False)
    serializer_class = MlogbInSerializer
    update_serializer_class = MlogbInUpdateSerializer

    @transaction.atomic
    def perform_destroy(self, instance):
        ins: Mlogb = instance
        if ins.mlog.submit_time is not None:
            raise ParseError('生产日志已提交不可编辑')
        ins.delete()

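    # perform_create does more than save the consumption row: for the route's main input on the
    # main batch it also generates the matching output row(s), based on the process type and on
    # whether input/output materials are tracked per batch or per piece:
    #   PRO_NORMAL - one output Mlogb mirroring count_use; piece records copied 1:1 when both sides
    #                are piece-tracked.
    #   PRO_DIV    - output quantity is count_use * route.div_number; piece-tracked inputs fan out
    #                to per-piece batches or numbered pieces ("<number>-1", "<number>-2", ...).
    #   PRO_MERGE  - output quantity is floor(count_use / route.div_number).
    #   is_fix     - rework: the output material equals the input material, quantities copied as-is.
    # Anything else raises, since no output can be generated for it.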
    @transaction.atomic
    def perform_create(self, serializer):
        mlogbin: Mlogb = serializer.save()
        mlog: Mlog = mlogbin.mlog
        route: Route = mlog.route
        mtype = route.process.mtype if route else None
        is_fix = mlog.is_fix
        qct = mlog.qct
        # (the related mlogbw piece records are handled below as well)
        material_in: Material = mlogbin.material_in
        # For rework (is_fix) the output material is the same as the input
        material_out: Material = mlog.material_out if is_fix is False else material_in
        if material_out is None:
            raise ParseError('产物不可为空')
        # Only the route's main input material on the main batch (no parent row) generates an output
        if (route and route.material_in != material_in) or mlogbin.parent is not None:
            return

        wm_in: WMaterial = mlogbin.wm_in

        if material_in.tracking == Material.MA_TRACKING_SINGLE:
            if mlogbin.count_use == wm_in.count:  # automatically create the Mlogbw piece records
                for wpr in Wpr.objects.filter(wm=wm_in).order_by("number"):
                    Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number})

        if qct is None and not (is_fix and mtype == Process.PRO_PROD):
            mlog.qct = Qct.get(material_out, "process")
            mlog.save(update_fields=["qct"])

        m_dict = {
            "mtask": mlogbin.mtask,
            "mlog": mlog,
            "material_out": material_out,
            "batch": mlogbin.batch,
            "batch_ofrom": wm_in.batch_ofrom, "material_ofrom": wm_in.material_ofrom,
            "qct": qct
        }
        if mtype == Process.PRO_DIV and material_in.tracking == Material.MA_TRACKING_SINGLE:
            pass
        else:
            m_dict['batch'] = generate_new_batch(mlogbin.batch, mlog)

        if mtype == Process.PRO_NORMAL:  # normal: batch-to-batch, piece-to-piece
            d_count_real = mlogbin.count_use
            d_count_ok = mlogbin.count_use
            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=
                update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok}))
            mlogbout.count_json = mlogbin.count_json
            mlogbout.save(update_fields=["count_json"])
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin})
        elif mtype == Process.PRO_DIV:  # split: batch-to-batch, piece-to-piece, piece-to-batch
            div_number = route.div_number
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_BATCH:
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    m_dict["batch"] = mlogbwin.number
                    Mlogb.objects.get_or_create(mlogbw_from=mlogbwin, defaults=update_dict(m_dict, {"count_real": div_number, "count_ok": div_number}))
            elif material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                d_count_real = mlogbin.count_use * div_number
                d_count_ok = d_count_real
                mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict,
                                                                                                   {"count_real": d_count_real, "count_ok": d_count_ok}))
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    for i in range(div_number):
                        Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f'{wpr_.number}-{i+1}', defaults={"mlogbw_from": mlogbwin})
            elif material_in.tracking == Material.MA_TRACKING_BATCH and material_out.tracking == Material.MA_TRACKING_BATCH:
                d_count_real = mlogbin.count_use * div_number
                d_count_ok = d_count_real
                mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok}))
                mlogbout.count_json = mlogbin.count_json
                mlogbout.save(update_fields=["count_json"])
        elif mtype == Process.PRO_MERGE:  # merge: batch-to-batch
            xcount = math.floor(mlogbin.count_use / route.div_number)
            d_count_real = xcount
            d_count_ok = xcount
            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok}))
            mlogbout.count_json = mlogbin.count_json
            mlogbout.save(update_fields=["count_json"])
        elif is_fix:  # rework: batch-to-batch, piece-to-piece
            d_count_real = mlogbin.count_use
            d_count_ok = mlogbin.count_use
            # Assign the result so the piece records below can be attached to the generated output row
            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok}))
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin})
        else:
            raise ParseError("不支持生成产出物料!")

class MlogbOutViewSet(UpdateModelMixin, CustomGenericViewSet):
    perms_map = {"put": "mlog.update"}
    queryset = Mlogb.objects.filter(material_out__isnull=False)
    serializer_class = MlogbOutUpdateSerializer

class FmlogViewSet(CustomModelViewSet):
    perms_map = {'get': '*', 'post': 'mlog.create', 'put': 'mlog.update', 'delete': 'mlog.delete'}
    queryset = Fmlog.objects.all()
    serializer_class = FmlogSerializer
    update_serializer_class = FmlogUpdateSerializer
    filterset_fields = ['mtask', 'mgroup', 'mtask__route', 'enabled']
    select_related_fields = ['mtask', 'mgroup', 'mtask__route', 'mtask__route__routepack', "mtask__route__material_out"]

    def destroy(self, request, *args, **kwargs):
        ins = self.get_object()
        if Mlog.objects.filter(fmlog=ins).exists():
            raise ParseError('因存在二级日志不可删除')
        return super().destroy(request, *args, **kwargs)

    @action(methods=['post'], detail=True, perms_map={'post': '*'}, serializer_class=Serializer)
    def toggle_enabled(self, request, *args, **kwargs):
        ins: Fmlog = self.get_object()
        ins.enabled = not ins.enabled
        ins.save()
        return Response()

class BatchStViewSet(ListModelMixin, CustomGenericViewSet):
    """
    list: Batch statistics

    Batch statistics
    """
    perms_map = {"get": "*"}
    queryset = BatchSt.objects.all()
    serializer_class = BatchStSerializer
    ordering_fields = ["batch", "last_time", "update_time"]
    ordering = ["batch"]
    filterset_class = BatchStFilter

class MlogbwViewSet(CustomModelViewSet):
    perms_map = {"get": "*", "post": "mlog.update", "put": "mlog.update", "delete": "mlog.update"}
    queryset = Mlogbw.objects.all()
    serializer_class = MlogbwCreateUpdateSerializer
    filterset_fields = ['mlogb']
    ordering = ["number", "create_time"]
    ordering_fields = ["number", "create_time"]

    def filter_queryset(self, queryset):
        # Listing requires an explicit mlogb filter so the endpoint never dumps every piece record
        if not self.detail and not self.request.query_params.get('mlogb', None):
            raise ParseError('请指定所属消耗/产出明细')
        return super().filter_queryset(queryset)

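    # Creating or deleting an individual piece record keeps the output side in sync: when the piece
    # belongs to an input row whose generated outputs are piece-tracked, matching output Mlogbw rows
    # are created (or the affected rows recalculated on delete), and Mlogbw.cal_count_notok, which
    # appears to recompute the not-OK counts, is invoked on every affected Mlogb.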
    @transaction.atomic
    def perform_create(self, serializer):
        ins: Mlogbw = serializer.save()
        route: Route = ins.mlogb.mlog.route
        mlogb: Mlogb = ins.mlogb
        Mlogbw.cal_count_notok(mlogb)
        # If this is an input piece and the output is tracked per piece, create the matching output records
        material_in: Material = mlogb.material_in
        if material_in is not None:
            mlogb_qs = Mlogb.objects.filter(mlogb_from=mlogb)
            material_out: Material = mlogb.mlog.material_out
            mtype = route.process.mtype if route and route.process else None
            if mtype == Process.PRO_DIV:
                mlogbin = ins.mlogb
                wm_in = mlogbin.wm_in
                mlog = mlogbin.mlog
                div_number = route.div_number
                m_dict = {
                    "mtask": mlogbin.mtask,
                    "mlog": mlog,
                    "batch": ins.number,
                    "material_out": material_out,
                    "batch_ofrom": wm_in.batch_ofrom,
                    "material_ofrom": wm_in.material_ofrom,
                    "count_real": div_number,
                    "count_ok": div_number, "qct": mlog.qct
                }
                mlogbout, _ = Mlogb.objects.get_or_create(mlogbw_from=ins, defaults=m_dict)
                if material_out.tracking == Material.MA_TRACKING_SINGLE:
                    for i in range(div_number):
                        Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f"{ins.number}-{i+1}", defaults={"mlogbw_from": ins})
                    Mlogbw.cal_count_notok(mlogbout)
            elif mlogb_qs.exists() and material_out.tracking == Material.MA_TRACKING_SINGLE:
                for mlogb in mlogb_qs:
                    if route.process.mtype == Process.PRO_NORMAL:
                        Mlogbw.objects.get_or_create(mlogb=mlogb, wpr=ins.wpr, defaults={"number": ins.number, "mlogbw_from": ins})
                    elif route.process.mtype == Process.PRO_DIV:
                        for i in range(route.div_number):
                            Mlogbw.objects.get_or_create(mlogb=mlogb, number=f'{ins.number}-{i+1}', defaults={"mlogbw_from": ins})
                    Mlogbw.cal_count_notok(mlogb)

    @transaction.atomic
    def perform_update(self, serializer):
        mlogbw = serializer.save()
        Mlogbw.cal_count_notok(mlogbw.mlogb)

    @transaction.atomic
    def perform_destroy(self, instance: Mlogbw):
        mlogb: Mlogb = instance.mlogb
        if mlogb.material_out is not None and instance.wpr is not None:
            raise ParseError("不能删除该产出明细")

        # If this is an input piece and the output is tracked per piece, the derived records must be removed as well
        material_in: Material = mlogb.material_in
        need_cal_mlogb = False
        if material_in is not None:
            mlogbw_qs = Mlogbw.objects.filter(mlogbw_from=instance)
            mlogbIds = list(mlogbw_qs.values_list("mlogb__id", flat=True))
            if mlogbIds:
                need_cal_mlogb = True

        ftest = instance.ftest
        instance.delete()
        if ftest:
            ftest.delete()
        Mlogbw.cal_count_notok(mlogb)

        if need_cal_mlogb:
            # Recalculate the affected output detail rows now that the source piece is gone
            mlogb_qs = Mlogb.objects.filter(id__in=mlogbIds)
            for mlogb in mlogb_qs:
                Mlogbw.cal_count_notok(mlogb)