diff --git a/apps/qm/migrations/0054_alter_ptest_val_xj.py b/apps/qm/migrations/0054_alter_ptest_val_xj.py new file mode 100644 index 00000000..6adcff52 --- /dev/null +++ b/apps/qm/migrations/0054_alter_ptest_val_xj.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.12 on 2025-10-10 01:32 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('qm', '0053_alter_ftest_is_ok'), + ] + + operations = [ + migrations.AlterField( + model_name='ptest', + name='val_xj', + field=models.CharField(blank=True, choices=[('S', '析晶'), ('R', '不析晶'), ('θ', '未化')], help_text=[('S', '析晶'), ('R', '不析晶'), ('θ', '未化')], max_length=10, null=True, verbose_name='析晶'), + ), + ] diff --git a/apps/qm/models.py b/apps/qm/models.py index 8e126806..6e524f12 100644 --- a/apps/qm/models.py +++ b/apps/qm/models.py @@ -445,7 +445,7 @@ class Ptest(CommonAModel): val_tg = models.FloatField("Tg", help_text='℃', null=True, blank=True) val_tf = models.FloatField("Tf", help_text='℃', null=True, blank=True) val_xj = models.CharField( - '析晶', max_length=10, default='S', choices=PTEST_XJ_VALS, help_text=list(PTEST_XJ_VALS)) + '析晶', max_length=10, null=True, blank=True, choices=PTEST_XJ_VALS, help_text=list(PTEST_XJ_VALS)) val_pzxs = models.FloatField( '膨胀系数', help_text='30-300℃', null=True, blank=True) val_zgwd = models.FloatField('升至最高温度', null=True, blank=True) diff --git a/apps/wpm/models.py b/apps/wpm/models.py index 1840d37c..f1712de2 100644 --- a/apps/wpm/models.py +++ b/apps/wpm/models.py @@ -524,7 +524,7 @@ class Mlogbw(BaseModel): mlogb.count_real = count count_notok = 0 count_notok_full = 0 - # 个追踪的合格b类不分批 + # 个追踪的合格b类不分开,这里会导致count_ok_full与count_ok一样了,暂时不做处理 tqs = Mlogbw.objects.filter(mlogb=mlogb, ftest__defect_main__isnull=False) tqs_a = Mlogbw.objects.filter(mlogb=mlogb, ftest__defect_main__isnull=False).values("ftest__defect_main").annotate(xcount=Count('id')) defect_ids = tqs.values_list("ftest__defect_main", flat=True) diff --git 
a/apps/wpm/views.py b/apps/wpm/views.py index c601d2a3..2fb9cbf8 100644 --- a/apps/wpm/views.py +++ b/apps/wpm/views.py @@ -14,19 +14,42 @@ from apps.utils.viewsets import CustomGenericViewSet, CustomModelViewSet from apps.utils.mixins import CustomListModelMixin, BulkCreateModelMixin, ComplexQueryMixin, BulkDestroyModelMixin, BulkUpdateModelMixin from .filters import StLogFilter, SfLogFilter, WMaterialFilter, MlogFilter, HandoverFilter, MlogbFilter, BatchStFilter, MlogbwFilter -from .models import (SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb, - Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect, MlogUser, BatchLog, Handoverb) -from .serializers import (SflogExpSerializer, SfLogSerializer, StLogSerializer, WMaterialSerializer, - MlogRevertSerializer, - MlogSerializer, MlogRelatedSerializer, DeptBatchSerializer, HandoverSerializer, - HandoverUpdateSerializer, - GenHandoverSerializer, GenHandoverWmSerializer, MlogAnaSerializer, - AttLogSerializer, OtherLogSerializer, MlogInitSerializer, MlogChangeSerializer, - MlogbDetailSerializer, MlogbInSerializer, MlogbInUpdateSerializer, - MlogbOutUpdateSerializer, FmlogSerializer, FmlogUpdateSerializer, BatchStSerializer, - MlogbwCreateUpdateSerializer, HandoverMgroupSerializer, MlogListSerializer, - MlogbSerializer, MlogUserSerializer, BatchLogSerializer, MlogQuickSerializer, - MlogbwStartTestSerializer, HandoverListSerializer) +from .models import SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb, Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect, MlogUser, BatchLog, Handoverb +from .serializers import ( + SflogExpSerializer, + SfLogSerializer, + StLogSerializer, + WMaterialSerializer, + MlogRevertSerializer, + MlogSerializer, + MlogRelatedSerializer, + DeptBatchSerializer, + HandoverSerializer, + HandoverUpdateSerializer, + GenHandoverSerializer, + GenHandoverWmSerializer, + MlogAnaSerializer, + AttLogSerializer, + OtherLogSerializer, + MlogInitSerializer, + MlogChangeSerializer, + 
MlogbDetailSerializer, + MlogbInSerializer, + MlogbInUpdateSerializer, + MlogbOutUpdateSerializer, + FmlogSerializer, + FmlogUpdateSerializer, + BatchStSerializer, + MlogbwCreateUpdateSerializer, + HandoverMgroupSerializer, + MlogListSerializer, + MlogbSerializer, + MlogUserSerializer, + BatchLogSerializer, + MlogQuickSerializer, + MlogbwStartTestSerializer, + HandoverListSerializer, +) from .services import mlog_submit, handover_submit, mlog_revert, get_batch_dag, handover_revert from apps.wpm.services import mlog_submit_validate, generate_new_batch from apps.wf.models import State, Ticket @@ -53,11 +76,12 @@ class StLogViewSet(CustomModelViewSet): 异常记录 """ + queryset = StLog.objects.all() serializer_class = StLogSerializer - select_related_fields = ['mgroup'] + select_related_fields = ["mgroup"] filterset_class = StLogFilter - ordering = ['-start_time'] + ordering = ["-start_time"] def destroy(self, request, *args, **kwargs): obj: StLog = self.get_object() @@ -65,7 +89,7 @@ class StLogViewSet(CustomModelViewSet): if obj.duration_sec <= 60: pass else: - raise ParseError('停机记录不可删除') + raise ParseError("停机记录不可删除") return super().destroy(request, *args, **kwargs) @@ -75,25 +99,28 @@ class SfLogViewSet(BulkUpdateModelMixin, CustomListModelMixin, BulkDestroyModelM 值班记录 """ - perms_map = {'get': '*', 'put': 'sflog.update', 'delete': 'sflog.delete'} + + perms_map = {"get": "*", "put": "sflog.update", "delete": "sflog.delete"} queryset = SfLog.objects.all() serializer_class = SfLogSerializer - select_related_fields = ['mgroup', 'shift', 'team', 'leader'] + select_related_fields = ["mgroup", "shift", "team", "leader"] filterset_class = SfLogFilter - search_fields = ['note'] - ordering = ['-start_time'] + search_fields = ["note"] + ordering = ["-start_time"] def add_info_for_list(self, data): - sflogs = [i['id'] for i in data] - sflogs_dict = EnStat.objects.filter(sflog_id__in=sflogs, type='sflog').values('sflog_id', 
'total_production','qua_data','production_hour','elec_consume_unit','run_rate','production_cost_unit') + sflogs = [i["id"] for i in data] + sflogs_dict = EnStat.objects.filter(sflog_id__in=sflogs, type="sflog").values( + "sflog_id", "total_production", "qua_data", "production_hour", "elec_consume_unit", "run_rate", "production_cost_unit" + ) # 创建索引 - sflogs_dict = {x['sflog_id']: x for x in sflogs_dict} + sflogs_dict = {x["sflog_id"]: x for x in sflogs_dict} for item in data: - if item['id'] in sflogs_dict: - item.update({"enstat_info": sflogs_dict[item['id']]}) + if item["id"] in sflogs_dict: + item.update({"enstat_info": sflogs_dict[item["id"]]}) return data - - @action(methods=['get'], detail=True, perms_map={'get': '*'}) + + @action(methods=["get"], detail=True, perms_map={"get": "*"}) def init_test(self, request, pk=None): """ 初始化检测录入 @@ -102,34 +129,31 @@ class SfLogViewSet(BulkUpdateModelMixin, CustomListModelMixin, BulkDestroyModelM """ from apps.qm.models import QuaStat, TestItem from apps.qm.serializers import QuaStatSerializer + obj = self.get_object() - test_materials = Material.objects.filter( - id__in=obj.mgroup.test_materials).order_by('sort', '-create_time') + test_materials = Material.objects.filter(id__in=obj.mgroup.test_materials).order_by("sort", "-create_time") for material in test_materials: - testitems = TestItem.objects.filter( - id__in=material.testitems).order_by('sort', '-create_time') + testitems = TestItem.objects.filter(id__in=material.testitems).order_by("sort", "-create_time") for testitem in testitems: - params = {'material': material, - 'testitem': testitem, 'sflog': obj} - QuaStat.objects.get_or_create( - **params, defaults={**params, **{'create_by': request.user, 'belong_dept': obj.mgroup.belong_dept}}) - qs = QuaStat.objects.filter(sflog=obj).order_by( - 'material__sort', 'material__create_time', 'testitem__sort', 'testitem__create_time') + params = {"material": material, "testitem": testitem, "sflog": obj} + 
QuaStat.objects.get_or_create(**params, defaults={**params, **{"create_by": request.user, "belong_dept": obj.mgroup.belong_dept}}) + qs = QuaStat.objects.filter(sflog=obj).order_by("material__sort", "material__create_time", "testitem__sort", "testitem__create_time") sr = QuaStatSerializer(instance=qs, many=True) return Response(sr.data) - - + + class SfLogExpViewSet(CustomListModelMixin, BulkUpdateModelMixin, CustomGenericViewSet): """ list:异常值班处理 异常值班处理 """ - perms_map = {'get': '*', 'put': 'sflogexp.update'} + + perms_map = {"get": "*", "put": "sflogexp.update"} queryset = SfLogExp.objects.all() serializer_class = SflogExpSerializer - select_related_fields = ['sflog', 'sflog__mgroup', 'stlog', 'sflog__team', 'sflog__shift', 'sflog__leader', 'stlog'] - filterset_fields = ['sflog', 'stlog'] + select_related_fields = ["sflog", "sflog__mgroup", "stlog", "sflog__team", "sflog__shift", "sflog__leader", "stlog"] + filterset_fields = ["sflog", "stlog"] class WMaterialViewSet(CustomListModelMixin, CustomGenericViewSet): @@ -138,22 +162,22 @@ class WMaterialViewSet(CustomListModelMixin, CustomGenericViewSet): 车间库存 """ - perms_map = {'get': '*'} + + perms_map = {"get": "*"} queryset = WMaterial.objects.filter(count__gt=0) serializer_class = WMaterialSerializer - select_related_fields = ['material', 'belong_dept', 'material__process', 'supplier'] - search_fields = ['material__name', - 'material__number', 'material__specification', 'batch', 'material__model', "defect__name", "notok_sign"] + select_related_fields = ["material", "belong_dept", "material__process", "supplier"] + search_fields = ["material__name", "material__number", "material__specification", "batch", "material__model", "defect__name", "notok_sign"] filterset_class = WMaterialFilter ordering_fields = ["update_time", "state", "count", "count_xtest"] def filter_queryset(self, queryset): queryset = super().filter_queryset(queryset) - if self.request.query_params.get('state_all'): + if 
self.request.query_params.get("state_all"): return queryset return queryset.exclude(state=WMaterial.WM_SCRAP) - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=DeptBatchSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=DeptBatchSerializer) def batchs(self, request): """获取车间的批次号(废弃) @@ -162,11 +186,10 @@ class WMaterialViewSet(CustomListModelMixin, CustomGenericViewSet): sr = DeptBatchSerializer(data=request.data) sr.is_valid(raise_exception=True) vdata = sr.validated_data - batchs = WMaterial.objects.filter( - belong_dept__name=vdata['belong_dept_name'], count__gt=0).values_list('batch', flat=True).distinct() + batchs = WMaterial.objects.filter(belong_dept__name=vdata["belong_dept_name"], count__gt=0).values_list("batch", flat=True).distinct() return Response(list(batchs)) - @action(methods=['get'], detail=False, perms_map={'get': '*'}) + @action(methods=["get"], detail=False, perms_map={"get": "*"}) def defects(self, request, *args, **kwargs): """获取物料的缺陷列表 @@ -174,97 +197,111 @@ class WMaterialViewSet(CustomListModelMixin, CustomGenericViewSet): """ from apps.qm.models import Defect from apps.qm.serializers import DefectSerializer + queryset = self.filter_queryset(self.get_queryset()) - defect_ids = queryset.values_list('defect', flat=True).distinct() + defect_ids = queryset.values_list("defect", flat=True).distinct() return Response(DefectSerializer(Defect.objects.filter(id__in=defect_ids), many=True).data) + class MlogViewSet(CustomModelViewSet): """ list: 生产日志 生产日志 """ + queryset = Mlog.objects.all() serializer_class = MlogSerializer list_serializer_class = MlogListSerializer - select_related_fields = ["create_by", "update_by", "handle_user", "equipment", "mgroup", "material_in", "material_out", - 'material_in__process', 'material_out__process', "mgroup__process", "submit_user"] + select_related_fields = [ + "create_by", + "update_by", + "handle_user", + "equipment", + "mgroup", + 
"material_in", + "material_out", + "material_in__process", + "material_out__process", + "mgroup__process", + "submit_user", + ] # select_related_fields = ['create_by', 'update_by', 'mtask', 'mtaskb', 'mgroup', # 'handle_user', 'handle_user_2', 'equipment', 'mgroup__belong_dept', # 'equipment_2', 'material_in', 'material_out', 'route__routepack', 'submit_user', - # 'supplier', 'ticket', 'mgroup__process', 'test_user', 'handle_leader', 'test_user', 'team', + # 'supplier', 'ticket', 'mgroup__process', 'test_user', 'handle_leader', 'test_user', 'team', # 'material_in__process', 'material_out__process', "shift", "route"] - prefetch_related_fields = ['handle_users', 'material_outs', 'equipments'] + prefetch_related_fields = ["handle_users", "material_outs", "equipments"] filterset_class = MlogFilter - search_fields = ['material_in__name', - 'material_in__number', 'material_in__specification', 'batch', 'material_in__model', - 'material_out__name', 'material_out__number', 'material_out__specification', 'material_out__model', "b_mlog__batch"] + search_fields = [ + "material_in__name", + "material_in__number", + "material_in__specification", + "batch", + "material_in__model", + "material_out__name", + "material_out__number", + "material_out__specification", + "material_out__model", + "b_mlog__batch", + ] ordering_fields = ["create_time", "update_time"] def add_info_for_item(self, data): if data.get("oinfo_json", {}): - czx_dict = dict(TestItem.objects.filter(id__in=data.get("oinfo_json", {}).keys()).values_list('id', 'name')) + czx_dict = dict(TestItem.objects.filter(id__in=data.get("oinfo_json", {}).keys()).values_list("id", "name")) data["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in data.get("oinfo_json", {}).items()} return data @classmethod - def lock_and_check_can_update(cls, mlog:Mlog): + def lock_and_check_can_update(cls, mlog: Mlog): if not connection.in_atomic_block: raise ParseError("请在事务中调用该方法") - mlog_lock:Mlog = 
Mlog.objects.select_for_update().get(id=mlog.id) + mlog_lock: Mlog = Mlog.objects.select_for_update().get(id=mlog.id) if mlog.submit_time is not None: raise ParseError("该记录已提交无法更改") return mlog_lock - def get_serializer_class(self): - if self.request.query_params.get('with_mlogb', False): + if self.request.query_params.get("with_mlogb", False): return MlogSerializer return super().get_serializer_class() - @swagger_auto_schema(manual_parameters=[ - openapi.Parameter(name="query", in_=openapi.IN_QUERY, description="定制返回数据", - type=openapi.TYPE_STRING, required=False), - openapi.Parameter(name="with_children", in_=openapi.IN_QUERY, description="带有children(yes/no/count)", - type=openapi.TYPE_STRING, required=False), - openapi.Parameter(name="with_mlogbw", in_=openapi.IN_QUERY, description="带有个列表(out)", - type=openapi.TYPE_STRING, required=False), - ]) + @swagger_auto_schema( + manual_parameters=[ + openapi.Parameter(name="query", in_=openapi.IN_QUERY, description="定制返回数据", type=openapi.TYPE_STRING, required=False), + openapi.Parameter(name="with_children", in_=openapi.IN_QUERY, description="带有children(yes/no/count)", type=openapi.TYPE_STRING, required=False), + openapi.Parameter(name="with_mlogbw", in_=openapi.IN_QUERY, description="带有个列表(out)", type=openapi.TYPE_STRING, required=False), + ] + ) def list(self, request, *args, **kwargs): from django.db import connection from django.conf import settings + # 清空之前的查询记录 connection.queries_log.clear() - + # 调用父类的 list 方法 response = super().list(request, *args, **kwargs) - + # 获取执行的 SQL 查询 queries = connection.queries sql_queries = [] - + for query in queries: - sql_queries.append({ - 'sql': query['sql'], - 'time': query['time'], - 'params': query.get('params', []) - }) - + sql_queries.append({"sql": query["sql"], "time": query["time"], "params": query.get("params", [])}) + # 将 SQL 信息添加到响应中(仅在调试模式) if settings.DEBUG: - response.data['_debug'] = { - 'sql_queries': sql_queries, - 'total_queries': len(queries), - 'total_time': 
sum(float(q['time']) for q in queries) - } - + response.data["_debug"] = {"sql_queries": sql_queries, "total_queries": len(queries), "total_time": sum(float(q["time"]) for q in queries)} + return response - + def add_info_for_list(self, data): czx_dict = {} for item in data: czx_dict.update(item.get("oinfo_json", {})) - czx_dict = dict(TestItem.objects.filter(id__in=czx_dict.keys()).values_list('id', 'name')) + czx_dict = dict(TestItem.objects.filter(id__in=czx_dict.keys()).values_list("id", "name")) for item in data: if item.get("oinfo_json", None): item["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in item.get("oinfo_json", {}).items()} @@ -279,41 +316,40 @@ class MlogViewSet(CustomModelViewSet): # if item.get("material_out", None): # data_dict[item_dict["mlog"]]["mlogb"].append(item_dict) # data = list(data_dict.values()) - if self.request.query_params.get('with_mlogbw', False) == 'out': + if self.request.query_params.get("with_mlogbw", False) == "out": wpr_dict = {item["id"]: [] for item in data} - wpr_out_qs = Mlogbw.objects.filter(mlogb__mlog__id__in=wpr_dict.keys(), - mlogb__material_out__isnull=False).values('mlogb__mlog__id', 'number') + wpr_out_qs = Mlogbw.objects.filter(mlogb__mlog__id__in=wpr_dict.keys(), mlogb__material_out__isnull=False).values("mlogb__mlog__id", "number") for item in wpr_out_qs: wpr_dict[item["mlogb__mlog__id"]].append(item["number"]) - + for item in data: item["mlogbw_number_list"] = wpr_dict.get(item["id"], None) return data def perform_destroy(self, instance): if instance.submit_time is not None: - raise ParseError('日志已提交不可变动') + raise ParseError("日志已提交不可变动") if instance.ticket and instance.ticket.state != State.STATE_TYPE_START: - raise ParseError('该日志存在审批!') + raise ParseError("该日志存在审批!") # delete_auditlog(instance, instance.id) if instance.ticket: instance.ticket.delete() - ftestIds = list(Ftest.objects.filter(mlogbw_ftest__mlogb__mlog=instance).values_list('id', flat=True)) + ftestIds = 
list(Ftest.objects.filter(mlogbw_ftest__mlogb__mlog=instance).values_list("id", flat=True)) instance.delete() Ftest.objects.filter(id__in=ftestIds).delete() def perform_update(self, serializer): ins = serializer.instance if ins.ticket and ins.ticket.state != State.STATE_TYPE_START: - raise ParseError('该日志在审批中不可修改!') + raise ParseError("该日志在审批中不可修改!") if ins.submit_time is not None: - raise ParseError('该日志已提交!') + raise ParseError("该日志已提交!") # val_old = MlogSerializer(instance=ins).data serializer.save() # val_new = MlogSerializer(instance=ins).data # create_auditlog('update', ins, val_new, val_old) - @action(methods=['post'], detail=False, perms_map={'post': 'mlog.init'}, serializer_class=MlogInitSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "mlog.init"}, serializer_class=MlogInitSerializer) def init(self, request, *args, **kwargs): """初始化日志 @@ -323,10 +359,8 @@ class MlogViewSet(CustomModelViewSet): sr.is_valid(raise_exception=True) ins = sr.save() return Response(MlogSerializer(ins).data) - - @action(methods=['post'], detail=True, - perms_map={'post': 'mlog.update'}, - serializer_class=MlogChangeSerializer) + + @action(methods=["post"], detail=True, perms_map={"post": "mlog.update"}, serializer_class=MlogChangeSerializer) @transaction.atomic def change(self, request, *args, **kwargs): """修改日志 @@ -335,16 +369,15 @@ class MlogViewSet(CustomModelViewSet): """ ins = self.get_object() if ins.submit_time is not None: - raise ParseError('该日志已提交!') + raise ParseError("该日志已提交!") if ins.ticket and ins.ticket.state != State.STATE_TYPE_START: - raise ParseError('该日志在审批中不可修改!') + raise ParseError("该日志在审批中不可修改!") sr = MlogChangeSerializer(instance=ins, data=request.data, partial=True) sr.is_valid(raise_exception=True) sr.save() return Response(MlogSerializer(ins).data) - @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'}, - serializer_class=Serializer) + @action(methods=["post"], detail=True, perms_map={"post": "mlog.submit"}, 
serializer_class=Serializer) @transaction.atomic def submit(self, request, *args, **kwargs): """日志提交(变动车间库存) @@ -354,17 +387,17 @@ class MlogViewSet(CustomModelViewSet): ins: Mlog = self.get_object() now = timezone.now() if ins.ticket: - raise ParseError('该日志存在审批!') + raise ParseError("该日志存在审批!") else: p: Process = ins.mgroup.process if p.mlog_need_ticket: - raise ParseError('该日志需要审批!') + raise ParseError("该日志需要审批!") mlog_submit_validate(ins) mlog_submit(ins, self.request.user, now) vdata_new = MlogSerializer(ins).data return Response(vdata_new) - @action(methods=['post'], detail=True, perms_map={'post': 'mlog.submit'}, serializer_class=MlogRevertSerializer) + @action(methods=["post"], detail=True, perms_map={"post": "mlog.submit"}, serializer_class=MlogRevertSerializer) @transaction.atomic def revert(self, request, *args, **kwargs): """撤回日志提交 @@ -373,17 +406,17 @@ class MlogViewSet(CustomModelViewSet): """ ins: Mlog = self.get_object() if ins.ticket: - raise ParseError('该日志存在审批!') + raise ParseError("该日志存在审批!") user = request.user if ins.submit_time is None: - raise ParseError('日志未提交不可撤销') + raise ParseError("日志未提交不可撤销") if user != ins.submit_user: - raise ParseError('非提交人不可撤销!') + raise ParseError("非提交人不可撤销!") now = timezone.now() mlog_revert(ins, user, now) return Response(MlogSerializer(instance=ins).data) - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=MlogRelatedSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=MlogRelatedSerializer) def related_first(self, request, *args, **kwargs): """获取相关任务的第一道工序日志 @@ -392,23 +425,22 @@ class MlogViewSet(CustomModelViewSet): sr = MlogRelatedSerializer(data=request.data) sr.is_valid(raise_exception=True) vdata = sr.validated_data - mtask = vdata['mtask'] + mtask = vdata["mtask"] if mtask.utask: mtasks = mtask.related - mlogs = Mlog.objects.filter(mtask__in=mtasks).order_by( - 'mtask__mgroup__process__sort', 'batch', 'create_time') + mlogs = 
Mlog.objects.filter(mtask__in=mtasks).order_by("mtask__mgroup__process__sort", "batch", "create_time") data = MlogSerializer(instance=mlogs, many=True).data res_data = [] for ind, val in enumerate(data): if ind == 0: res_data.append(val) else: - before = data[ind-1] - if val['batch'] != before['batch']: + before = data[ind - 1] + if val["batch"] != before["batch"]: res_data.append(val) return Response(res_data) - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=MlogAnaSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=MlogAnaSerializer) def ana(self, request): """核心统计数据 @@ -418,35 +450,34 @@ class MlogViewSet(CustomModelViewSet): sr.is_valid(raise_exception=True) vdata = sr.validated_data mlogs = Mlog.objects.exclude(submit_time=None) - if vdata.get('belong_dept_name', ''): - mlogs = mlogs.filter( - mgroup__belong_dept__name=vdata['belong_dept_name']) - if vdata.get('material_cate', ''): - mlogs = mlogs.filter( - material_out__cate=vdata['material_cate']) - if vdata.get('start_date', ''): - mlogs = mlogs.filter(handle_date__gte=vdata['start_date']) - if vdata.get('end_date', ''): - mlogs = mlogs.filter(handle_date__lte=vdata['end_date']) + if vdata.get("belong_dept_name", ""): + mlogs = mlogs.filter(mgroup__belong_dept__name=vdata["belong_dept_name"]) + if vdata.get("material_cate", ""): + mlogs = mlogs.filter(material_out__cate=vdata["material_cate"]) + if vdata.get("start_date", ""): + mlogs = mlogs.filter(handle_date__gte=vdata["start_date"]) + if vdata.get("end_date", ""): + mlogs = mlogs.filter(handle_date__lte=vdata["end_date"]) res = mlogs.aggregate( - count_real=Sum('count_real'), - count_ok=Sum('count_ok'), - count_notok=Sum('count_notok'), - count_n_zw=Sum('count_n_zw'), - count_n_tw=Sum('count_n_tw'), - count_n_qp=Sum('count_n_qp'), - count_n_wq=Sum('count_n_wq'), - count_n_dl=Sum('count_n_dl'), - count_n_pb=Sum('count_n_pb'), - count_n_dxt=Sum('count_n_dxt'), - 
count_n_jsqx=Sum('count_n_jsqx'), - count_n_qt=Sum('count_n_qt')) + count_real=Sum("count_real"), + count_ok=Sum("count_ok"), + count_notok=Sum("count_notok"), + count_n_zw=Sum("count_n_zw"), + count_n_tw=Sum("count_n_tw"), + count_n_qp=Sum("count_n_qp"), + count_n_wq=Sum("count_n_wq"), + count_n_dl=Sum("count_n_dl"), + count_n_pb=Sum("count_n_pb"), + count_n_dxt=Sum("count_n_dxt"), + count_n_jsqx=Sum("count_n_jsqx"), + count_n_qt=Sum("count_n_qt"), + ) for i in res: if res[i] is None: res[i] = 0 return Response(res) - @action(methods=['post'], detail=False, perms_map={'post': 'mlog.create'}, serializer_class=MlogQuickSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "mlog.create"}, serializer_class=MlogQuickSerializer) @transaction.atomic def quick(self, request, *args, **kwargs): """快速创建日志 @@ -456,10 +487,7 @@ class MlogViewSet(CustomModelViewSet): sr = MlogQuickSerializer(data=request.data) sr.is_valid(raise_exception=True) vdata = sr.validated_data - mloginit_data = {"mgroup": vdata["mgroup"], - "work_start_time": vdata["work_start_time"], - "handle_user": vdata["handle_user"], - "is_fix": vdata["is_fix"], "create_by": request.user} + mloginit_data = {"mgroup": vdata["mgroup"], "work_start_time": vdata["work_start_time"], "handle_user": vdata["handle_user"], "is_fix": vdata["is_fix"], "create_by": request.user} if "work_end_time" in vdata: mloginit_data["work_end_time"] = vdata["work_end_time"] if "route" in vdata: @@ -468,15 +496,11 @@ class MlogViewSet(CustomModelViewSet): mloginit_data["equipment"] = vdata["equipment"] if "team" in vdata: mloginit_data["team"] = vdata["team"] - sr_1 = MlogInitSerializer(data=mloginit_data) + sr_1 = MlogInitSerializer(data=mloginit_data, context={"request": request}) sr_1.is_valid(raise_exception=True) - mlog:Mlog = sr_1.save() + mlog: Mlog = sr_1.save() # 开始创建消耗产出 - mlogbin_data = { - "mlog": mlog.id, - "wm_in": vdata["wm_in"], - "count_use": vdata["count_use"] - } + mlogbin_data = {"mlog": mlog.id, "wm_in": 
vdata["wm_in"], "count_use": vdata["count_use"]} if "mtask" in vdata: mlogbin_data["mtask"] = vdata["mtask"] if "route" in vdata: @@ -488,60 +512,59 @@ class MlogViewSet(CustomModelViewSet): MlogbInViewSet.p_create_after(mlogbin, wprIds=wprIds) return Response({"mlog": str(mlog.id), "mlogbin": str(mlogbin.id)}) + class HandoverViewSet(CustomModelViewSet): """ list: 交接记录 交接记录 """ + queryset = Handover.objects.all() list_serializer_class = HandoverListSerializer serializer_class = HandoverSerializer - select_related_fields = ['send_user', 'send_mgroup', 'send_dept', 'recive_user', 'recive_mgroup', 'recive_dept', 'wm'] + select_related_fields = ["send_user", "send_mgroup", "send_dept", "recive_user", "recive_mgroup", "recive_dept", "wm"] filterset_class = HandoverFilter - search_fields = ['id', 'material__name', - 'material__number', 'material__specification', 'batch', 'material__model', 'b_handover__batch', "new_batch", "wm__batch"] - prefetch_related_fields = [ - Prefetch('b_handover', queryset=Handoverb.objects.select_related('wm__defect')) - ] + search_fields = ["id", "material__name", "material__number", "material__specification", "batch", "material__model", "b_handover__batch", "new_batch", "wm__batch"] + prefetch_related_fields = [Prefetch("b_handover", queryset=Handoverb.objects.select_related("wm__defect"))] - def perform_destroy(self, instance:Handover): + def perform_destroy(self, instance: Handover): user = self.request.user if instance.submit_time is not None: - raise ParseError('该交接记录已提交不可删除') + raise ParseError("该交接记录已提交不可删除") if instance.send_user != user and instance.recive_user != user and instance.create_by != user: - raise ParseError('非交送人和接收人不可删除该记录') - ticket:Ticket = instance.ticket + raise ParseError("非交送人和接收人不可删除该记录") + ticket: Ticket = instance.ticket if ticket: ticket.delete() instance.delete() def perform_update(self, serializer): - ins:Handover = serializer.instance + ins: Handover = serializer.instance if ins.submit_time is not None: - 
raise ParseError('该交接记录已提交!') - ticket:Ticket = ins.ticket + raise ParseError("该交接记录已提交!") + ticket: Ticket = ins.ticket if ticket and ticket.state.type != State.STATE_TYPE_START: raise ParseError("该交接记录存在审批,不可修改") serializer.save() - - @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'}, serializer_class=HandoverSerializer) + + @action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=HandoverSerializer) @transaction.atomic def create_and_submit(self, request, *args, **kwargs): user = request.user - sr = HandoverSerializer(data=request.data, context={'request': request}) + sr = HandoverSerializer(data=request.data, context={"request": request}) sr.is_valid(raise_exception=True) ins = sr.save() if ins.type != Handover.H_SCRAP: if ins.recive_user is None or user == ins.recive_user or user.belong_dept == ins.recive_user.belong_dept: pass - else: - raise ParseError('非接收人不可提交') + else: + raise ParseError("非接收人不可提交") if ins.submit_time is None: handover_submit(ins, user, None) return Response({"id": ins.id}) - @action(methods=['post'], detail=True, perms_map={'post': 'handover.submit'}, serializer_class=Serializer) + @action(methods=["post"], detail=True, perms_map={"post": "handover.submit"}, serializer_class=Serializer) @transaction.atomic def submit(self, request, *args, **kwargs): """交接记录提交(变动车间库存) @@ -553,16 +576,16 @@ class HandoverViewSet(CustomModelViewSet): if ins.type != Handover.H_SCRAP: if ins.recive_user is None or user == ins.recive_user or user.belong_dept == ins.recive_user.belong_dept: pass - else: - raise ParseError('非接收人不可提交') - ticket:Ticket = ins.ticket + else: + raise ParseError("非接收人不可提交") + ticket: Ticket = ins.ticket if ticket and ticket.state.type != State.STATE_TYPE_END: - raise ParseError('该交接记录审批未完成,不可提交') + raise ParseError("该交接记录审批未完成,不可提交") if ins.submit_time is None: handover_submit(ins, user, None) return Response() - - @action(methods=['post'], detail=True, 
perms_map={'post': 'handover.submit'}, serializer_class=Serializer) + + @action(methods=["post"], detail=True, perms_map={"post": "handover.submit"}, serializer_class=Serializer) @transaction.atomic def revert(self, request, *args, **kwargs): """交接记录撤回(变动车间库存) @@ -570,14 +593,14 @@ class HandoverViewSet(CustomModelViewSet): 交接记录撤回 """ ins: Handover = self.get_object() - if ins.new_batch: # 如果是合批 + if ins.new_batch: # 如果是合批 if Handoverb.objects.filter(batch=ins.new_batch, handover__submit_time__isnull=True).exists(): raise ParseError("该合批存在未提交的交接记录,不可撤回") if ins.submit_time: handover_revert(ins, handler=request.user) return Response() - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=HandoverMgroupSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=HandoverMgroupSerializer) def mgroups(self, request, *args, **kwargs): """获取可交接到的工段 @@ -586,20 +609,17 @@ class HandoverViewSet(CustomModelViewSet): sr = HandoverMgroupSerializer(data=request.data) sr.is_valid(raise_exception=True) vdata = sr.validated_data - materialInId = vdata['material'] - type = vdata['type'] + materialInId = vdata["material"] + type = vdata["type"] m_qs = Mgroup.objects.all() if type in [Handover.H_NORMAL, Handover.H_CHANGE]: m_qs = m_qs.filter(process__route_p__routepack__state=RoutePack.RP_S_CONFIRM) - m_qs = m_qs.filter( - process__route_p__material_in__id=materialInId)|m_qs.filter( - process__route_p__routemat_route__material__id=materialInId - ) + m_qs = m_qs.filter(process__route_p__material_in__id=materialInId) | m_qs.filter(process__route_p__routemat_route__material__id=materialInId) elif type in [Handover.H_SCRAP]: m_qs = m_qs.filter(process=None) - return Response(list(m_qs.values('id', 'name').distinct())) - - @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'}, serializer_class=GenHandoverWmSerializer) + return Response(list(m_qs.values("id", "name").distinct())) + + 
@action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=GenHandoverWmSerializer) @transaction.atomic def gen_by_wm(self, request): """从车间库存生成交接记录(废弃) @@ -610,10 +630,17 @@ class HandoverViewSet(CustomModelViewSet): sr.is_valid(raise_exception=True) vdata = sr.validated_data user = request.user - send_date, send_mgroup, send_user, recive_dept, recive_user, wm, count = vdata['send_date'], vdata['send_mgroup'], vdata[ - 'send_user'], vdata['recive_dept'], vdata['recive_user'], vdata['wm'], vdata['count'] + send_date, send_mgroup, send_user, recive_dept, recive_user, wm, count = ( + vdata["send_date"], + vdata["send_mgroup"], + vdata["send_user"], + vdata["recive_dept"], + vdata["recive_user"], + vdata["wm"], + vdata["count"], + ) if send_mgroup.belong_dept != wm.belong_dept: - raise ParseError('送料工段错误!') + raise ParseError("送料工段错误!") handover = Handover.objects.create( send_date=send_date, send_user=send_user, @@ -625,11 +652,11 @@ class HandoverViewSet(CustomModelViewSet): material=wm.material, count=count, wm=wm, - create_by=user + create_by=user, ) - return Response({'handover': handover.id}) + return Response({"handover": handover.id}) - @action(methods=['post'], detail=False, perms_map={'post': 'handover.create'}, serializer_class=GenHandoverSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=GenHandoverSerializer) @transaction.atomic def gen_by_mlog(self, request): """从生产日志生成交接记录(废弃) @@ -640,9 +667,8 @@ class HandoverViewSet(CustomModelViewSet): sr.is_valid(raise_exception=True) vdata = sr.validated_data user = request.user - send_date, send_user, recive_dept, recive_user = vdata['send_date'], vdata[ - 'send_user'], vdata['recive_dept'], vdata['recive_user'] - for mlog in vdata['mlogs']: + send_date, send_user, recive_dept, recive_user = vdata["send_date"], vdata["send_user"], vdata["recive_dept"], vdata["recive_user"] + for mlog in vdata["mlogs"]: 
Handover.objects.create( send_date=send_date, send_user=send_user, @@ -655,7 +681,7 @@ class HandoverViewSet(CustomModelViewSet): count_eweight=mlog.count_real_eweight, mlog=mlog, send_mgroup=mlog.mgroup, - create_by=user + create_by=user, ) return Response() @@ -666,12 +692,12 @@ class AttlogViewSet(CustomModelViewSet): 车间到岗 """ + queryset = AttLog.objects.all() serializer_class = AttLogSerializer - select_related_fields = ['user', 'post', 'sflog'] - filterset_fields = ['sflog__mgroup', - 'sflog__mgroup__belong_dept__name', 'sflog__work_date', 'sflog__mgroup__cate', 'sflog__mgroup__need_enm'] - ordering = ['-sflog__work_date', 'create_time'] + select_related_fields = ["user", "post", "sflog"] + filterset_fields = ["sflog__mgroup", "sflog__mgroup__belong_dept__name", "sflog__work_date", "sflog__mgroup__cate", "sflog__mgroup__need_enm"] + ordering = ["-sflog__work_date", "create_time"] class OtherLogViewSet(CustomModelViewSet): @@ -680,18 +706,16 @@ class OtherLogViewSet(CustomModelViewSet): 其他生产记录 """ + queryset = OtherLog.objects.all() serializer_class = OtherLogSerializer - filterset_fields = { - "product": ["exact"], - "handle_date": ["exact", "gte", "lte"] - } - search_fields = ['product'] - ordering = ['-handle_date', '-create_time'] + filterset_fields = {"product": ["exact"], "handle_date": ["exact", "gte", "lte"]} + search_fields = ["product"] + ordering = ["-handle_date", "-create_time"] class MlogbViewSet(CustomListModelMixin, CustomGenericViewSet): - perms_map = {'get': '*'} + perms_map = {"get": "*"} queryset = Mlogb.objects.all() serializer_class = MlogbDetailSerializer select_related_fields = ["material_out", "material_in", "test_user"] @@ -700,7 +724,7 @@ class MlogbViewSet(CustomListModelMixin, CustomGenericViewSet): class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyModelMixin, CustomGenericViewSet): - perms_map = {'post': 'mlog.update', 'delete': 'mlog.update', 'put': 'mlog.update'} + perms_map = {"post": "mlog.update", 
"delete": "mlog.update", "put": "mlog.update"} queryset = Mlogb.objects.filter(material_in__isnull=False) serializer_class = MlogbInSerializer update_serializer_class = MlogbInUpdateSerializer @@ -712,44 +736,43 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode mlog.cal_mlog_count_from_mlogb() def perform_update(self, serializer): - ins:Mlogb = serializer.instance + ins: Mlogb = serializer.instance mlog = MlogViewSet.lock_and_check_can_update(ins.mlog) - ins:Mlogb = serializer.save() + ins: Mlogb = serializer.save() mlog.cal_mlog_count_from_mlogb() - + @classmethod - def p_create_after(cls, mlogbin:Mlogb, wprIds:list=[]): - mlogbin_parent:Mlogb = mlogbin.parent - mlog:Mlog = mlogbin.mlog - mgroup:Mgroup = mlog.mgroup - route:Route = mlogbin.route + def p_create_after(cls, mlogbin: Mlogb, wprIds: list = []): + mlogbin_parent: Mlogb = mlogbin.parent + mlog: Mlog = mlogbin.mlog + mgroup: Mgroup = mlog.mgroup + route: Route = mlogbin.route is_fix = mlog.is_fix if route is None and is_fix is False: - raise ParseError('消耗物料缺失工艺步骤') + raise ParseError("消耗物料缺失工艺步骤") process: Process = mgroup.process if route else None mtype = process.mtype if process else None # qct = mlog.qct # 以及mlogbw - material_in:Material = mlogbin.material_in + material_in: Material = mlogbin.material_in # 如果是返修,则输出和输入相同 - material_out:Material = material_in if is_fix else route.material_out + material_out: Material = material_in if is_fix else route.material_out if material_out is None: - raise ParseError('产物不可为空') + raise ParseError("产物不可为空") if route and route.material_in != material_in: - raise ParseError('工艺步骤输入与实际输入不符') + raise ParseError("工艺步骤输入与实际输入不符") # 如果是主要输入物料且是主批次,才需生成输出 if mlogbin_parent is not None: if mtype and mtype == Process.PRO_MERGE: pass else: return - wm_in: WMaterial = mlogbin.wm_in - if material_in.tracking == Material.MA_TRACKING_SINGLE: # 自动创建mlogbw + if material_in.tracking == Material.MA_TRACKING_SINGLE: # 自动创建mlogbw if wprIds: - 
wprs_can_use_qs = Wpr.objects.filter(id__in=wprIds).exclude(wpr_mlogbw__mlogb__mlog__submit_time__isnull=True).order_by("number") + wprs_can_use_qs = Wpr.objects.filter(id__in=wprIds).exclude(id__in=Mlogbw.objects.filter(wpr__wm__id=wm_in, mlogb__mlog__submit_time__isnull=True).values_list("wpr__id", flat=True)).order_by("number") if not wprs_can_use_qs.exists(): raise ParseError("当前产品都不可使用") wm_ids = list(wprs_can_use_qs.order_by().values_list("wm__id", flat=True).distinct()) # 这里加入order_by()防止把number加入去重 @@ -758,16 +781,15 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode else: raise ParseError("单个产品列表不属于当前批次") for wpr in wprs_can_use_qs: - Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number}) + Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number}) mlogbin.count_use = Mlogbw.objects.filter(mlogb=mlogbin).count() mlogbin.save(update_fields=["count_use"]) else: - wprs_can_use_qs = Wpr.objects.filter(wm=wm_in).exclude(wpr_mlogbw__mlogb__mlog__submit_time__isnull=True).order_by("number") + wprs_can_use_qs = Wpr.objects.filter(wm=wm_in).exclude(id__in=Mlogbw.objects.filter(wpr__wm__id=wm_in, mlogb__mlog__submit_time__isnull=True).values_list("wpr__id", flat=True)).order_by("number") if wprs_can_use_qs.count() == mlogbin.count_use: for wpr in wprs_can_use_qs: Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number}) - - + # if qct is None: # mlog.qct = Qct.get(material_out, "process") # mlog.save(update_fields = ["qct"]) @@ -778,13 +800,14 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode "mlog": mlog, "material_out": material_out, "batch": mlogbin.batch, - "batch_ofrom": wm_in.batch_ofrom, "material_ofrom": wm_in.material_ofrom, - "qct": Qct.get(material_out, "process", "out") + "batch_ofrom": wm_in.batch_ofrom, + "material_ofrom": wm_in.material_ofrom, + "qct": Qct.get(material_out, "process", "out"), } if 
mtype == Process.PRO_DIV and material_in.tracking == Material.MA_TRACKING_SINGLE: pass else: - m_dict['batch'] = generate_new_batch(mlogbin.batch, mlog) + m_dict["batch"] = generate_new_batch(mlogbin.batch, mlog) else: m_dict = { "mtask": mlogbin_parent.mtask, @@ -792,34 +815,34 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode "mlog": mlog, "material_out": material_out, "batch": mlogbin_parent.batch, - "batch_ofrom": mlogbin_parent.wm_in.batch_ofrom, "material_ofrom": mlogbin_parent.wm_in.material_ofrom, - "qct": Qct.get(material_out, "process", "out") + "batch_ofrom": mlogbin_parent.wm_in.batch_ofrom, + "material_ofrom": mlogbin_parent.wm_in.material_ofrom, + "qct": Qct.get(material_out, "process", "out"), } if mtype == Process.PRO_DIV and material_in.tracking == Material.MA_TRACKING_SINGLE: pass else: - m_dict['batch'] = generate_new_batch(mlogbin_parent.batch, mlog) + m_dict["batch"] = generate_new_batch(mlogbin_parent.batch, mlog) - if is_fix:# 支持批到批,个到个 - d_count_real = mlogbin.count_use-mlogbin.count_pn_jgqbl + if is_fix: # 支持批到批,个到个 + d_count_real = mlogbin.count_use - mlogbin.count_pn_jgqbl d_count_ok = d_count_real - mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict,{"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) + mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE: for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"): wpr_ = mlogbwin.wpr Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin}) - elif mtype == Process.PRO_NORMAL: # 正常 支持批到批, 个到个 + elif mtype == Process.PRO_NORMAL: # 正常 支持批到批, 个到个 d_count_real = mlogbin.count_use - 
mlogbin.count_pn_jgqbl d_count_ok = d_count_real - mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults= - update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) + mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) mlogbout.count_json_from = mlogbin.count_json_from mlogbout.save(update_fields=["count_json_from"]) if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE: - for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"): - wpr_ = mlogbwin.wpr - Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin}) - elif mtype == Process.PRO_DIV: # 切分 支持批到批,个到个, 个到批 + for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"): + wpr_ = mlogbwin.wpr + Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin}) + elif mtype == Process.PRO_DIV: # 切分 支持批到批,个到个, 个到批 div_number = route.div_number if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_BATCH: lenx = Mlogbw.objects.filter(mlogb=mlogbin).count() @@ -827,8 +850,7 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"): if process and process.number_to_batch: m_dict["batch"] = mlogbwin.number - mlogbout, _ = Mlogb.objects.get_or_create( - mlogbw_from=mlogbwin, defaults=update_dict(m_dict, {"count_real": div_number, "count_ok": div_number, "count_ok_full": div_number})) + mlogbout, _ = Mlogb.objects.get_or_create(mlogbw_from=mlogbwin, defaults=update_dict(m_dict, {"count_real": div_number, "count_ok": div_number, "count_ok_full": div_number})) if lenx == 1: mlogbout.mlogb_from = mlogbin 
mlogbout.number_from = mlogbwin.number @@ -839,34 +861,36 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"): wpr_ = mlogbwin.wpr for key, val in wpr_.oinfo.items(): - if val['name'] == "切片数": + if val["name"] == "切片数": div_number = int(val["val"]) d_count_real = d_count_real + div_number if div_number == 1: Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin}) else: for i in range(div_number): - Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f'{wpr_.number}-{i+1}', defaults={"mlogbw_from": mlogbwin}) + Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f"{wpr_.number}-{i+1}", defaults={"mlogbw_from": mlogbwin}) d_count_ok = d_count_real mlogbout.count_real = d_count_real mlogbout.count_ok = d_count_ok mlogbout.count_ok_full = d_count_ok mlogbout.save(update_fields=["count_real", "count_ok", "count_ok_full"]) elif material_in.tracking == Material.MA_TRACKING_BATCH and material_out.tracking == Material.MA_TRACKING_BATCH: - d_count_real = (mlogbin.count_use-mlogbin.count_pn_jgqbl) * div_number + d_count_real = (mlogbin.count_use - mlogbin.count_pn_jgqbl) * div_number d_count_ok = d_count_real - mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict,{"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) + mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok})) mlogbout.count_json_from = mlogbin.count_json_from mlogbout.save(update_fields=["count_json_from"]) - elif mtype == Process.PRO_MERGE: # 支持批到批,批到个 + elif mtype == Process.PRO_MERGE: # 支持批到批,批到个 div_number = route.div_number if mlogbin_parent is not None: # 说明是次批 if mlogbin.material_in == mlogbin_parent.material_in: # 如果主次物料一致,则进行处理 - count_use_sum = 
Mlogb.objects.filter(Q(id=mlogbin_parent.id)|Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum('count_use'))['count_use__sum'] or 0 - count_pn_jgqbl_sum = Mlogb.objects.filter(Q(id=mlogbin_parent.id)|Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum('count_pn_jgqbl'))['count_pn_jgqbl__sum'] or 0 - xcount = math.floor( (count_use_sum-count_pn_jgqbl_sum) / div_number) + count_use_sum = Mlogb.objects.filter(Q(id=mlogbin_parent.id) | Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum("count_use"))["count_use__sum"] or 0 + count_pn_jgqbl_sum = ( + Mlogb.objects.filter(Q(id=mlogbin_parent.id) | Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum("count_pn_jgqbl"))["count_pn_jgqbl__sum"] or 0 + ) + xcount = math.floor((count_use_sum - count_pn_jgqbl_sum) / div_number) else: # 获取可用的辅料 if not RouteMat.objects.filter(material=mlogbin.material_in, route=route).exists(): @@ -874,8 +898,8 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode # 使用主批作为后续引用 mlogbin = mlogbin_parent else: - xcount = math.floor( (mlogbin.count_use-mlogbin.count_pn_jgqbl) / div_number) - if xcount >0: + xcount = math.floor((mlogbin.count_use - mlogbin.count_pn_jgqbl) / div_number) + if xcount > 0: d_count_real = xcount d_count_ok = xcount number_to_batch = process.number_to_batch @@ -901,7 +925,7 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode if wpr_number_rule: numberx = number_list[i] else: - numberx = f'{number}-{i+1}' + numberx = f"{number}-{i+1}" if number_to_batch: mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": 1, "count_ok": 1, "count_ok_full": 1, "batch": numberx})) Mlogbw.objects.get_or_create(number=numberx, mlogb=mlogbout) @@ -909,88 +933,97 @@ class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyMode raise ParseError("不支持生成产出物料!") 
mlog.cal_mlog_count_from_mlogb() - def perform_create(self, serializer): vdata = serializer.validated_data mlogbin: Mlogb = serializer.save() MlogViewSet.lock_and_check_can_update(mlogbin.mlog) - MlogbInViewSet.p_create_after(mlogbin, wprIds=vdata.get('wprIds', [])) + MlogbInViewSet.p_create_after(mlogbin, wprIds=vdata.get("wprIds", [])) @classmethod - def gen_number_with_rule(cls, rule, material_out:Material, mlog:Mlog, gen_count=1): + def gen_number_with_rule(cls, rule, material_out: Material, mlog: Mlog, gen_count=1): from apps.wpmw.models import Wpr + handle_date = mlog.handle_date c_year = handle_date.year c_year2 = str(c_year)[-2:] c_month = handle_date.month m_model = material_out.model # 按生产日志查询 - wpr = Wpr.objects.filter(wpr_mlogbw__mlogb__material_out__isnull=False, - wpr_mlogbw__mlogb__mlog__mgroup__process=mlog.mgroup.process, - wpr_mlogbw__mlogb__mlog__is_fix=False, - wpr_mlogbw__mlogb__mlog__submit_time__isnull=False, - wpr_mlogbw__mlogb__mlog__handle_date__year=c_year, - wpr_mlogbw__mlogb__mlog__handle_date__month=c_month).order_by("number").last() + wpr = ( + Wpr.objects.filter( + wpr_mlogbw__mlogb__material_out__isnull=False, + wpr_mlogbw__mlogb__mlog__mgroup__process=mlog.mgroup.process, + wpr_mlogbw__mlogb__mlog__is_fix=False, + wpr_mlogbw__mlogb__mlog__submit_time__isnull=False, + wpr_mlogbw__mlogb__mlog__handle_date__year=c_year, + wpr_mlogbw__mlogb__mlog__handle_date__month=c_month, + ) + .order_by("number") + .last() + ) cq_w = 4 - if 'n_count:02d' in rule: + if "n_count:02d" in rule: cq_w = 2 n_count = 0 if wpr: try: - n_count = int(wpr.number[-cq_w:].lstrip('0')) + n_count = int(wpr.number[-cq_w:].lstrip("0")) except Exception as e: raise ParseError(f"获取该类产品最后编号错误: {str(e)}") try: if gen_count == 1: - return rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count+1, c_year2=c_year2) + return rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count + 1, c_year2=c_year2) else: - return 
[rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count+i+1, c_year2=c_year2) for i in range(gen_count)] + return [rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count + i + 1, c_year2=c_year2) for i in range(gen_count)] except Exception as e: raise ParseError(f"个号生成错误: {e}") + class MlogbOutViewSet(BulkUpdateModelMixin, CustomGenericViewSet): perms_map = {"put": "mlog.update"} queryset = Mlogb.objects.filter(material_out__isnull=False) serializer_class = MlogbOutUpdateSerializer def perform_update(self, serializer): - ins:Mlogb = serializer.instance + ins: Mlogb = serializer.instance mlog = MlogViewSet.lock_and_check_can_update(ins.mlog) - material_out = serializer.validated_data.get('material_out') + material_out = serializer.validated_data.get("material_out") if material_out and material_out.tracking == Material.MA_TRACKING_SINGLE: raise ParseError("单件产品不支持直接修改") - ins:Mlogb = serializer.save() + ins: Mlogb = serializer.save() mlog.cal_mlog_count_from_mlogb() class FmlogViewSet(CustomModelViewSet): - perms_map = {'get': '*', 'post': 'mlog.create', 'put': 'mlog.update', 'delete': 'mlog.delete'} + perms_map = {"get": "*", "post": "mlog.create", "put": "mlog.update", "delete": "mlog.delete"} queryset = Fmlog.objects.all() serializer_class = FmlogSerializer update_serializer_class = FmlogUpdateSerializer - filterset_fields = ['mtask', 'mgroup', 'mtask__route', 'enabled'] - select_related_fields = ['mtask', 'mgroup', 'mtask__route', 'mtask__route__routepack', "mtask__route__material_out"] + filterset_fields = ["mtask", "mgroup", "mtask__route", "enabled"] + select_related_fields = ["mtask", "mgroup", "mtask__route", "mtask__route__routepack", "mtask__route__material_out"] search_fields = ["mlog_fmlog__b_mlog__batch"] def destroy(self, request, *args, **kwargs): ins = self.get_object() if Mlog.objects.filter(fmlog=ins).exists(): - raise ParseError('因存在二级日志不可删除') + raise ParseError("因存在二级日志不可删除") return 
super().destroy(request, *args, **kwargs) - - @action(methods=['post'], detail=True, perms_map={'post': '*'}, serializer_class=Serializer) + + @action(methods=["post"], detail=True, perms_map={"post": "*"}, serializer_class=Serializer) def toggle_enabled(self, request, *args, **kwargs): - ins:Fmlog = self.get_object() + ins: Fmlog = self.get_object() ins.enabled = False if ins.enabled else True ins.save() return Response() - + + class BatchStViewSet(CustomListModelMixin, ComplexQueryMixin, CustomGenericViewSet): """ list: 批次统计数据 批次统计数据 """ + perms_map = {"get": "*"} queryset = BatchSt.objects.all() serializer_class = BatchStSerializer @@ -1015,8 +1048,8 @@ class MlogbwViewSet(CustomModelViewSet): # return super().filter_queryset(queryset) def perform_create(self, serializer): - ins:Mlogbw = serializer.save() - mlog:Mlog = None + ins: Mlogbw = serializer.save() + mlog: Mlog = None if isinstance(ins, list): insx = ins else: @@ -1031,14 +1064,14 @@ class MlogbwViewSet(CustomModelViewSet): if not checked: MlogViewSet.lock_and_check_can_update(mlog) checked = True - wpr:Wpr = ins.wpr - mlogb:Mlogb = ins.mlogb + wpr: Wpr = ins.wpr + mlogb: Mlogb = ins.mlogb if wpr.wm != mlogb.wm_in: raise ParseError("单个与所属批次不一致") - route:Route = mlogb.route if mlogb.route else mlog.route + route: Route = mlogb.route if mlogb.route else mlog.route Mlogbw.cal_count_notok(mlogb) # 如果是输入且输出追踪到个,需同步创建 - material_in:Material = mlogb.material_in + material_in: Material = mlogb.material_in if material_in is not None: mlogb_qs = Mlogb.objects.filter(mlogb_from=mlogb) if route: @@ -1057,7 +1090,7 @@ class MlogbwViewSet(CustomModelViewSet): Mlogbw.objects.get_or_create(wpr=ins.wpr, mlogb=mlogb, defaults={"number": ins.wpr.number, "mlogbw_from": ins}) else: for i in range(route.div_number): - Mlogbw.objects.get_or_create(mlogb=mlogb, number=f'{ins.number}-{i+1}', defaults={"mlogbw_from": ins}) + Mlogbw.objects.get_or_create(mlogb=mlogb, number=f"{ins.number}-{i+1}", defaults={"mlogbw_from": ins}) 
Mlogbw.cal_count_notok(mlogb) elif mtype == Process.PRO_DIV: mlogbin = ins.mlogb @@ -1065,14 +1098,15 @@ class MlogbwViewSet(CustomModelViewSet): mlog = mlogbin.mlog div_number = route.div_number m_dict = { - "mtask": mlogbin.mtask, - "mlog": mlog, - "batch": ins.number, - "material_out": material_out, - "batch_ofrom": wm_in.batch_ofrom, - "material_ofrom": wm_in.material_ofrom, - "count_real": div_number, - "count_ok": div_number, "qct": mlog.qct + "mtask": mlogbin.mtask, + "mlog": mlog, + "batch": ins.number, + "material_out": material_out, + "batch_ofrom": wm_in.batch_ofrom, + "material_ofrom": wm_in.material_ofrom, + "count_real": div_number, + "count_ok": div_number, + "qct": mlog.qct, } mlogbout, _ = Mlogb.objects.get_or_create(mlogbw_from=ins, defaults=m_dict) if material_out.tracking == Material.MA_TRACKING_SINGLE: @@ -1101,9 +1135,9 @@ class MlogbwViewSet(CustomModelViewSet): mlog = mlogbw.mlogb.mlog mlog = MlogViewSet.lock_and_check_can_update(mlog) Mlogbw.cal_count_notok(mlogbw.mlogb) - + mlog.cal_mlog_count_from_mlogb() - + def after_bulk_update(self, objs): mlogbIds = list(set([obj["mlogb"] for obj in objs])) for mlogbId in mlogbIds: @@ -1112,12 +1146,12 @@ class MlogbwViewSet(CustomModelViewSet): Mlogbw.cal_count_notok(mlogb) mlog.cal_mlog_count_from_mlogb() - def perform_destroy(self, instance:Mlogbw): - mlogb:Mlogb = instance.mlogb + def perform_destroy(self, instance: Mlogbw): + mlogb: Mlogb = instance.mlogb mlog = MlogViewSet.lock_and_check_can_update(mlogb.mlog) if mlogb.material_out is not None and instance.wpr is not None: raise ParseError("不能删除该产出明细") - + # 如果是输入且输出追踪到个,需同步删除 material_in: Material = mlogb.material_in need_cal_mlogb = False @@ -1127,7 +1161,6 @@ class MlogbwViewSet(CustomModelViewSet): if mlogbIds: need_cal_mlogb = True - ftest = instance.ftest instance.delete() if ftest: @@ -1138,10 +1171,10 @@ class MlogbwViewSet(CustomModelViewSet): mlogb_qs = Mlogb.objects.filter(id__in=mlogbIds) for mlogb in mlogb_qs: 
Mlogbw.cal_count_notok(mlogb) - + mlog.cal_mlog_count_from_mlogb() - @action(methods=['post'], detail=False, perms_map={'post': 'mlog.update'}, serializer_class=MlogbwStartTestSerializer) + @action(methods=["post"], detail=False, perms_map={"post": "mlog.update"}, serializer_class=MlogbwStartTestSerializer) @transaction.atomic def start_test(self, request, *args, **kwargs): sr = MlogbwStartTestSerializer(data=request.data) @@ -1149,6 +1182,7 @@ class MlogbwViewSet(CustomModelViewSet): sr.save() return Response() + class MlogUserViewSet(BulkCreateModelMixin, CustomListModelMixin, BulkDestroyModelMixin, CustomGenericViewSet): perms_map = {"get": "*", "post": "mlog.update", "delete": "mlog.update"} queryset = MlogUser.objects.all() @@ -1161,12 +1195,12 @@ class MlogUserViewSet(BulkCreateModelMixin, CustomListModelMixin, BulkDestroyMod if self.request.method == "GET": qs = qs.filter(mlog__id=self.request.query_params.get("mlog", None)) return qs - + def perform_destroy(self, instance): - mlog:Mlog = instance.mlog + mlog: Mlog = instance.mlog MlogViewSet.lock_and_check_can_update(mlog) return super().perform_destroy(instance) - + class BatchLogViewSet(CustomListModelMixin, CustomGenericViewSet): perms_map = {"get": "*"} @@ -1174,7 +1208,7 @@ class BatchLogViewSet(CustomListModelMixin, CustomGenericViewSet): serializer_class = BatchLogSerializer select_related_fields = ["source", "target"] - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=Serializer) + @action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=Serializer) @transaction.atomic def dag(self, request): """ @@ -1187,8 +1221,8 @@ class BatchLogViewSet(CustomListModelMixin, CustomGenericViewSet): if not batch: raise ParseError("缺少batch参数") return Response(get_batch_dag(batch, method)) - - @action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=Serializer) + + @action(methods=["post"], detail=False, perms_map={"post": "*"}, 
serializer_class=Serializer) def batches_to(self, request, *args, **kwargs): """获取已指向的批次号 @@ -1198,4 +1232,4 @@ class BatchLogViewSet(CustomListModelMixin, CustomGenericViewSet): batch = data.get("batch", None) if not batch: raise ParseError("请指定批次号") - return Response(BatchLog.batches_to(batch=batch)) \ No newline at end of file + return Response(BatchLog.batches_to(batch=batch)) diff --git a/changelog.md b/changelog.md index 70a22eba..88a073c3 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,15 @@ +## 2.8.2025101011 +- feat: 新增功能 + - Ptest val_xj可为空 [caoqianming] + - quick调用serializer时传入request [caoqianming] + - ofm-修改model [zty] + - Mroombooking添加字段 [caoqianming] + - base add_info_for_item 可复用list逻辑 [caoqianming] + - 添加wpr查询参数 [caoqianming] + - base cquery支持annotate [caoqianming] + - mroombooking 返回slots [caoqianming] +- fix: 问题修复 + - p_create_after 自动创建mlogbw时关于exclude语句导致的查询错误 [caoqianming] ## 2.8.2025092816 - feat: 新增功能 - handover revert撤回时做校验 [caoqianming] diff --git a/server/settings.py b/server/settings.py index 35f1295c..ec75b8d0 100755 --- a/server/settings.py +++ b/server/settings.py @@ -35,7 +35,7 @@ sys.path.insert(0, os.path.join(BASE_DIR, 'apps')) ALLOWED_HOSTS = ['*'] SYS_NAME = '星途工厂综合管理系统' -SYS_VERSION = '2.8.2025092816' +SYS_VERSION = '2.8.2025101011' X_FRAME_OPTIONS = 'SAMEORIGIN' # Application definition