import math

from django.db import transaction
from rest_framework.decorators import action
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from rest_framework.serializers import Serializer
from django.db.models import Sum
from django.utils import timezone
from apps.system.models import User

from apps.mtm.models import Material, Process, Route, Mgroup, RoutePack, RouteMat
from apps.utils.viewsets import CustomGenericViewSet, CustomModelViewSet
from apps.utils.mixins import CustomListModelMixin, BulkCreateModelMixin, ComplexQueryMixin, BulkDestroyModelMixin, BulkUpdateModelMixin

from .filters import StLogFilter, SfLogFilter, WMaterialFilter, MlogFilter, HandoverFilter, MlogbFilter, BatchStFilter, MlogbwFilter
from .models import SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb, Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect, MlogUser, BatchLog, Handoverb
from .serializers import (
    SflogExpSerializer,
    SfLogSerializer,
    StLogSerializer,
    WMaterialSerializer,
    MlogRevertSerializer,
    MlogSerializer,
    MlogRelatedSerializer,
    DeptBatchSerializer,
    HandoverSerializer,
    HandoverUpdateSerializer,
    GenHandoverSerializer,
    GenHandoverWmSerializer,
    MlogAnaSerializer,
    AttLogSerializer,
    OtherLogSerializer,
    MlogInitSerializer,
    MlogChangeSerializer,
    MlogbDetailSerializer,
    MlogbInSerializer,
    MlogbInUpdateSerializer,
    MlogbOutUpdateSerializer,
    FmlogSerializer,
    FmlogUpdateSerializer,
    BatchStSerializer,
    MlogbwCreateUpdateSerializer,
    HandoverMgroupSerializer,
    MlogListSerializer,
    MlogbSerializer,
    MlogUserSerializer,
    BatchLogSerializer,
    MlogQuickSerializer,
    MlogbwStartTestSerializer,
    HandoverListSerializer,
)
from .services import mlog_submit, handover_submit, mlog_revert, get_batch_dag, handover_revert
from apps.wpm.services import mlog_submit_validate, generate_new_batch
from apps.wf.models import State, Ticket
from apps.wpmw.models import Wpr
from apps.qm.models import Qct, Ftest, TestItem
from apps.enm.models import EnStat
from django.db.models import Q
from apps.utils.tools import convert_ordereddict, update_dict
from django.db.models import Count
from datetime import datetime, timedelta
from apps.em.models import Equipment
from django.db.models import Prefetch
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from django.db import connection


# Create your views here.
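

# Rough map of the domain objects used below (editor's note, inferred from usage in this module):
# - Mlog is a production log; Mlogb rows are its per-batch consumption/output details and
#   Mlogbw rows track the individual pieces (Wpr) inside a Mlogb.
# - WMaterial is workshop stock, Handover moves stock between work sections/departments,
#   and SfLog/StLog/AttLog record shift duty, stoppages and attendance.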


class StLogViewSet(CustomModelViewSet):
    """
    list: Abnormality records

    Abnormality records
    """

    queryset = StLog.objects.all()
    serializer_class = StLogSerializer
    select_related_fields = ["mgroup"]
    filterset_class = StLogFilter
    ordering = ["-start_time"]

    def destroy(self, request, *args, **kwargs):
        obj: StLog = self.get_object()
        if obj.is_shutdown and obj.duration_sec > 60:
            raise ParseError("停机记录不可删除")
        return super().destroy(request, *args, **kwargs)


class SfLogViewSet(BulkUpdateModelMixin, CustomListModelMixin, BulkDestroyModelMixin, CustomGenericViewSet):
    """
    list: Shift duty records

    Shift duty records
    """

    perms_map = {"get": "*", "put": "sflog.update", "delete": "sflog.delete"}
    queryset = SfLog.objects.all()
    serializer_class = SfLogSerializer
    select_related_fields = ["mgroup", "shift", "team", "leader"]
    filterset_class = SfLogFilter
    search_fields = ["note"]
    ordering = ["-start_time"]
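
    # Bulk-attach the EnStat energy/production statistics to every listed shift log in one
    # query (keyed by sflog_id) instead of querying per row.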
    def add_info_for_list(self, data):
        sflogs = [i["id"] for i in data]
        sflogs_dict = EnStat.objects.filter(sflog_id__in=sflogs, type="sflog").values(
            "sflog_id", "total_production", "qua_data", "production_hour", "elec_consume_unit", "run_rate", "production_cost_unit"
        )
        # Build an index keyed by sflog_id
        sflogs_dict = {x["sflog_id"]: x for x in sflogs_dict}
        for item in data:
            if item["id"] in sflogs_dict:
                item.update({"enstat_info": sflogs_dict[item["id"]]})
        return data
@action(methods=["get"], detail=True, perms_map={"get": "*"})
|
|
def init_test(self, request, pk=None):
|
|
"""
|
|
初始化检测录入
|
|
|
|
初始化检测录入
|
|
"""
|
|
from apps.qm.models import QuaStat, TestItem
|
|
from apps.qm.serializers import QuaStatSerializer
|
|
|
|
obj = self.get_object()
|
|
test_materials = Material.objects.filter(id__in=obj.mgroup.test_materials).order_by("sort", "-create_time")
|
|
for material in test_materials:
|
|
testitems = TestItem.objects.filter(id__in=material.testitems).order_by("sort", "-create_time")
|
|
for testitem in testitems:
|
|
params = {"material": material, "testitem": testitem, "sflog": obj}
|
|
QuaStat.objects.get_or_create(**params, defaults={**params, **{"create_by": request.user, "belong_dept": obj.mgroup.belong_dept}})
|
|
qs = QuaStat.objects.filter(sflog=obj).order_by("material__sort", "material__create_time", "testitem__sort", "testitem__create_time")
|
|
sr = QuaStatSerializer(instance=qs, many=True)
|
|
return Response(sr.data)
|
|
|
|
|
|


class SfLogExpViewSet(CustomListModelMixin, BulkUpdateModelMixin, CustomGenericViewSet):
    """
    list: Shift abnormality handling

    Shift abnormality handling
    """

    perms_map = {"get": "*", "put": "sflogexp.update"}
    queryset = SfLogExp.objects.all()
    serializer_class = SflogExpSerializer
    select_related_fields = ["sflog", "sflog__mgroup", "stlog", "sflog__team", "sflog__shift", "sflog__leader"]
    filterset_fields = ["sflog", "stlog"]


class WMaterialViewSet(CustomListModelMixin, CustomGenericViewSet):
    """
    list: Workshop inventory

    Workshop inventory
    """

    perms_map = {"get": "*"}
    queryset = WMaterial.objects.filter(count__gt=0)
    serializer_class = WMaterialSerializer
    select_related_fields = ["material", "belong_dept", "material__process", "supplier"]
    search_fields = ["material__name", "material__number", "material__specification", "batch", "material__model", "defect__name", "notok_sign"]
    filterset_class = WMaterialFilter
    ordering_fields = ["update_time", "state", "count", "count_xtest"]
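
    # Scrapped stock is hidden from all list queries unless the caller explicitly passes
    # the `state_all` query parameter.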
    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        if self.request.query_params.get("state_all"):
            return queryset
        return queryset.exclude(state=WMaterial.WM_SCRAP)
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=DeptBatchSerializer)
|
|
def batchs(self, request):
|
|
"""获取车间的批次号(废弃)
|
|
|
|
获取车间的批次号
|
|
"""
|
|
sr = DeptBatchSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
batchs = WMaterial.objects.filter(belong_dept__name=vdata["belong_dept_name"], count__gt=0).values_list("batch", flat=True).distinct()
|
|
return Response(list(batchs))
|
|
|
|
@action(methods=["get"], detail=False, perms_map={"get": "*"})
|
|
def defects(self, request, *args, **kwargs):
|
|
"""获取物料的缺陷列表
|
|
|
|
获取物料的缺陷列表
|
|
"""
|
|
from apps.qm.models import Defect
|
|
from apps.qm.serializers import DefectSerializer
|
|
|
|
queryset = self.filter_queryset(self.get_queryset())
|
|
defect_ids = queryset.values_list("defect", flat=True).distinct()
|
|
return Response(DefectSerializer(Defect.objects.filter(id__in=defect_ids), many=True).data)
|
|
|
|
|
|


class MlogViewSet(CustomModelViewSet):
    """
    list: Production logs

    Production logs
    """

    queryset = Mlog.objects.all()
    serializer_class = MlogSerializer
    list_serializer_class = MlogListSerializer
    select_related_fields = [
        "create_by",
        "update_by",
        "handle_user",
        "equipment",
        "mgroup",
        "material_in",
        "material_out",
        "material_in__process",
        "material_out__process",
        "mgroup__process",
        "submit_user",
    ]
    # select_related_fields = ['create_by', 'update_by', 'mtask', 'mtaskb', 'mgroup',
    #                          'handle_user', 'handle_user_2', 'equipment', 'mgroup__belong_dept',
    #                          'equipment_2', 'material_in', 'material_out', 'route__routepack', 'submit_user',
    #                          'supplier', 'ticket', 'mgroup__process', 'test_user', 'handle_leader', 'test_user', 'team',
    #                          'material_in__process', 'material_out__process', "shift", "route"]
    prefetch_related_fields = ["handle_users", "material_outs", "equipments"]
    filterset_class = MlogFilter
    search_fields = [
        "material_in__name",
        "material_in__number",
        "material_in__specification",
        "batch",
        "material_in__model",
        "material_out__name",
        "material_out__number",
        "material_out__specification",
        "material_out__model",
        "b_mlog__batch",
    ]
    ordering_fields = ["create_time", "update_time"]

    def add_info_for_item(self, data):
        if data.get("oinfo_json", {}):
            czx_dict = dict(TestItem.objects.filter(id__in=data.get("oinfo_json", {}).keys()).values_list("id", "name"))
            data["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in data.get("oinfo_json", {}).items()}
        return data
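
    # Helper used by the Mlogb/Mlogbw/MlogUser viewsets below: it must run inside a
    # transaction, takes a row lock on the Mlog and rejects any change once the log has
    # been submitted (the submit check is done on the freshly locked row).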
    @classmethod
    def lock_and_check_can_update(cls, mlog: Mlog):
        if not connection.in_atomic_block:
            raise ParseError("请在事务中调用该方法")
        mlog_lock: Mlog = Mlog.objects.select_for_update().get(id=mlog.id)
        if mlog_lock.submit_time is not None:
            raise ParseError("该记录已提交无法更改")
        return mlog_lock

    def get_serializer_class(self):
        if self.request.query_params.get("with_mlogb", False):
            return MlogSerializer
        return super().get_serializer_class()
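
    # Debug instrumentation: the list() override below records every SQL query issued while
    # building the response and, when settings.DEBUG is on, attaches them under "_debug".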
    @swagger_auto_schema(
        manual_parameters=[
            openapi.Parameter(name="query", in_=openapi.IN_QUERY, description="定制返回数据", type=openapi.TYPE_STRING, required=False),
            openapi.Parameter(name="with_children", in_=openapi.IN_QUERY, description="带有children(yes/no/count)", type=openapi.TYPE_STRING, required=False),
            openapi.Parameter(name="with_mlogbw", in_=openapi.IN_QUERY, description="带有个列表(out)", type=openapi.TYPE_STRING, required=False),
        ]
    )
    def list(self, request, *args, **kwargs):
        from django.db import connection
        from django.conf import settings

        # Clear previously recorded queries
        connection.queries_log.clear()

        # Call the parent list method
        response = super().list(request, *args, **kwargs)

        # Collect the executed SQL queries
        queries = connection.queries
        sql_queries = []

        for query in queries:
            sql_queries.append({"sql": query["sql"], "time": query["time"], "params": query.get("params", [])})

        # Attach the SQL info to the response (debug mode only)
        if settings.DEBUG:
            response.data["_debug"] = {"sql_queries": sql_queries, "total_queries": len(queries), "total_time": sum(float(q["time"]) for q in queries)}

        return response

    def add_info_for_list(self, data):
        czx_dict = {}
        for item in data:
            czx_dict.update(item.get("oinfo_json", {}))
        czx_dict = dict(TestItem.objects.filter(id__in=czx_dict.keys()).values_list("id", "name"))
        for item in data:
            if item.get("oinfo_json", None):
                item["oinfo_json_"] = {czx_dict.get(k, k): v for k, v in item.get("oinfo_json", {}).items()}
        # if self.request.query_params.get('with_mlogb', False):
        #     data_dict = {item['id']: {**item, "mlogb_full": [], "mlogb": []} for item in data}
        #     mlogb_qs = Mlogb.objects.filter(mlog__id__in=data_dict.keys()).select_related("material_in", "material_out").order_by("create_time")
        #     mlogb_data = MlogbDetailSerializer(instance=mlogb_qs, many=True).data
        #     for item in mlogb_data:
        #         item_dict = convert_ordereddict(item)
        #         if item_dict["mlog"] in data_dict:
        #             data_dict[item_dict["mlog"]]["mlogb_full"].append(item_dict)
        #         if item.get("material_out", None):
        #             data_dict[item_dict["mlog"]]["mlogb"].append(item_dict)
        #     data = list(data_dict.values())
        if self.request.query_params.get("with_mlogbw", False) == "out":
            wpr_dict = {item["id"]: [] for item in data}
            wpr_out_qs = Mlogbw.objects.filter(mlogb__mlog__id__in=wpr_dict.keys(), mlogb__material_out__isnull=False).values("mlogb__mlog__id", "number")
            for item in wpr_out_qs:
                wpr_dict[item["mlogb__mlog__id"]].append(item["number"])

            for item in data:
                item["mlogbw_number_list"] = wpr_dict.get(item["id"], None)
        return data

    def perform_destroy(self, instance):
        if instance.submit_time is not None:
            raise ParseError("日志已提交不可变动")
        if instance.ticket and instance.ticket.state != State.STATE_TYPE_START:
            raise ParseError("该日志存在审批!")
        # delete_auditlog(instance, instance.id)
        if instance.ticket:
            instance.ticket.delete()
        ftestIds = list(Ftest.objects.filter(mlogbw_ftest__mlogb__mlog=instance).values_list("id", flat=True))
        instance.delete()
        Ftest.objects.filter(id__in=ftestIds).delete()

    def perform_update(self, serializer):
        ins = serializer.instance
        if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
            raise ParseError("该日志在审批中不可修改!")
        if ins.submit_time is not None:
            raise ParseError("该日志已提交!")
        # val_old = MlogSerializer(instance=ins).data
        serializer.save()
        # val_new = MlogSerializer(instance=ins).data
        # create_auditlog('update', ins, val_new, val_old)
@action(methods=["post"], detail=False, perms_map={"post": "mlog.init"}, serializer_class=MlogInitSerializer)
|
|
def init(self, request, *args, **kwargs):
|
|
"""初始化日志
|
|
|
|
初始化日志
|
|
"""
|
|
sr = self.get_serializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
ins = sr.save()
|
|
return Response(MlogSerializer(ins).data)
|
|
|
|
@action(methods=["post"], detail=True, perms_map={"post": "mlog.update"}, serializer_class=MlogChangeSerializer)
|
|
@transaction.atomic
|
|
def change(self, request, *args, **kwargs):
|
|
"""修改日志
|
|
|
|
修改日志
|
|
"""
|
|
ins = self.get_object()
|
|
if ins.submit_time is not None:
|
|
raise ParseError("该日志已提交!")
|
|
if ins.ticket and ins.ticket.state != State.STATE_TYPE_START:
|
|
raise ParseError("该日志在审批中不可修改!")
|
|
sr = MlogChangeSerializer(instance=ins, data=request.data, partial=True)
|
|
sr.is_valid(raise_exception=True)
|
|
sr.save()
|
|
return Response(MlogSerializer(ins).data)
|
|
|
|
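
    # Submitting moves the log's counts into workshop inventory. Direct submission is only
    # allowed when no approval ticket exists and the process does not require one.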
@action(methods=["post"], detail=True, perms_map={"post": "mlog.submit"}, serializer_class=Serializer)
|
|
@transaction.atomic
|
|
def submit(self, request, *args, **kwargs):
|
|
"""日志提交(变动车间库存)
|
|
|
|
日志提交
|
|
"""
|
|
ins: Mlog = self.get_object()
|
|
now = timezone.now()
|
|
if ins.ticket:
|
|
raise ParseError("该日志存在审批!")
|
|
else:
|
|
p: Process = ins.mgroup.process
|
|
if p.mlog_need_ticket:
|
|
raise ParseError("该日志需要审批!")
|
|
mlog_submit_validate(ins)
|
|
mlog_submit(ins, self.request.user, now)
|
|
vdata_new = MlogSerializer(ins).data
|
|
return Response(vdata_new)
|
|
|
|
@action(methods=["post"], detail=True, perms_map={"post": "mlog.submit"}, serializer_class=MlogRevertSerializer)
|
|
@transaction.atomic
|
|
def revert(self, request, *args, **kwargs):
|
|
"""撤回日志提交
|
|
|
|
撤回日志提交
|
|
"""
|
|
ins: Mlog = self.get_object()
|
|
if ins.ticket:
|
|
raise ParseError("该日志存在审批!")
|
|
user = request.user
|
|
if ins.submit_time is None:
|
|
raise ParseError("日志未提交不可撤销")
|
|
if user != ins.submit_user:
|
|
raise ParseError("非提交人不可撤销!")
|
|
now = timezone.now()
|
|
mlog_revert(ins, user, now)
|
|
return Response(MlogSerializer(instance=ins).data)
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=MlogRelatedSerializer)
|
|
def related_first(self, request, *args, **kwargs):
|
|
"""获取相关任务的第一道工序日志
|
|
|
|
获取相关任务的第一道工序日志
|
|
"""
|
|
sr = MlogRelatedSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
mtask = vdata["mtask"]
|
|
if mtask.utask:
|
|
mtasks = mtask.related
|
|
mlogs = Mlog.objects.filter(mtask__in=mtasks).order_by("mtask__mgroup__process__sort", "batch", "create_time")
|
|
data = MlogSerializer(instance=mlogs, many=True).data
|
|
res_data = []
|
|
for ind, val in enumerate(data):
|
|
if ind == 0:
|
|
res_data.append(val)
|
|
else:
|
|
before = data[ind - 1]
|
|
if val["batch"] != before["batch"]:
|
|
res_data.append(val)
|
|
return Response(res_data)
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=MlogAnaSerializer)
|
|
def ana(self, request):
|
|
"""核心统计数据
|
|
|
|
核心统计数据
|
|
"""
|
|
sr = MlogAnaSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
mlogs = Mlog.objects.exclude(submit_time=None)
|
|
if vdata.get("belong_dept_name", ""):
|
|
mlogs = mlogs.filter(mgroup__belong_dept__name=vdata["belong_dept_name"])
|
|
if vdata.get("material_cate", ""):
|
|
mlogs = mlogs.filter(material_out__cate=vdata["material_cate"])
|
|
if vdata.get("start_date", ""):
|
|
mlogs = mlogs.filter(handle_date__gte=vdata["start_date"])
|
|
if vdata.get("end_date", ""):
|
|
mlogs = mlogs.filter(handle_date__lte=vdata["end_date"])
|
|
res = mlogs.aggregate(
|
|
count_real=Sum("count_real"),
|
|
count_ok=Sum("count_ok"),
|
|
count_notok=Sum("count_notok"),
|
|
count_n_zw=Sum("count_n_zw"),
|
|
count_n_tw=Sum("count_n_tw"),
|
|
count_n_qp=Sum("count_n_qp"),
|
|
count_n_wq=Sum("count_n_wq"),
|
|
count_n_dl=Sum("count_n_dl"),
|
|
count_n_pb=Sum("count_n_pb"),
|
|
count_n_dxt=Sum("count_n_dxt"),
|
|
count_n_jsqx=Sum("count_n_jsqx"),
|
|
count_n_qt=Sum("count_n_qt"),
|
|
)
|
|
for i in res:
|
|
if res[i] is None:
|
|
res[i] = 0
|
|
return Response(res)
|
|
|
|
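
    # One-call convenience endpoint: it initializes a Mlog via MlogInitSerializer and then
    # creates the consumed-input detail via MlogbInSerializer in the same transaction.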
@action(methods=["post"], detail=False, perms_map={"post": "mlog.create"}, serializer_class=MlogQuickSerializer)
|
|
@transaction.atomic
|
|
def quick(self, request, *args, **kwargs):
|
|
"""快速创建日志
|
|
|
|
快速创建日志
|
|
"""
|
|
sr = MlogQuickSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
mloginit_data = {"mgroup": vdata["mgroup"], "work_start_time": vdata["work_start_time"], "handle_user": vdata["handle_user"], "is_fix": vdata["is_fix"], "create_by": request.user}
|
|
if "work_end_time" in vdata:
|
|
mloginit_data["work_end_time"] = vdata["work_end_time"]
|
|
if "route" in vdata:
|
|
mloginit_data["route"] = vdata["route"]
|
|
if "equipment" in vdata:
|
|
mloginit_data["equipment"] = vdata["equipment"]
|
|
if "team" in vdata:
|
|
mloginit_data["team"] = vdata["team"]
|
|
sr_1 = MlogInitSerializer(data=mloginit_data, request=request)
|
|
sr_1.is_valid(raise_exception=True)
|
|
mlog: Mlog = sr_1.save()
|
|
# 开始创建消耗产出
|
|
mlogbin_data = {"mlog": mlog.id, "wm_in": vdata["wm_in"], "count_use": vdata["count_use"]}
|
|
if "mtask" in vdata:
|
|
mlogbin_data["mtask"] = vdata["mtask"]
|
|
if "route" in vdata:
|
|
mlogbin_data["route"] = vdata["route"]
|
|
wprIds = vdata.get("wprs_in", [])
|
|
sr_2 = MlogbInSerializer(data=mlogbin_data)
|
|
sr_2.is_valid(raise_exception=True)
|
|
mlogbin = sr_2.save()
|
|
MlogbInViewSet.p_create_after(mlogbin, wprIds=wprIds)
|
|
return Response({"mlog": str(mlog.id), "mlogbin": str(mlogbin.id)})
|
|
|
|
|
|


class HandoverViewSet(CustomModelViewSet):
    """
    list: Handover records

    Handover records
    """

    queryset = Handover.objects.all()
    list_serializer_class = HandoverListSerializer
    serializer_class = HandoverSerializer
    select_related_fields = ["send_user", "send_mgroup", "send_dept", "recive_user", "recive_mgroup", "recive_dept", "wm"]
    filterset_class = HandoverFilter
    search_fields = ["id", "material__name", "material__number", "material__specification", "batch", "material__model", "b_handover__batch", "new_batch", "wm__batch"]
    prefetch_related_fields = [Prefetch("b_handover", queryset=Handoverb.objects.select_related("wm__defect"))]

    def perform_destroy(self, instance: Handover):
        user = self.request.user
        if instance.submit_time is not None:
            raise ParseError("该交接记录已提交不可删除")
        if instance.send_user != user and instance.recive_user != user and instance.create_by != user:
            raise ParseError("非交送人和接收人不可删除该记录")
        ticket: Ticket = instance.ticket
        if ticket:
            ticket.delete()
        instance.delete()

    def perform_update(self, serializer):
        ins: Handover = serializer.instance
        if ins.submit_time is not None:
            raise ParseError("该交接记录已提交!")
        ticket: Ticket = ins.ticket
        if ticket and ticket.state.type != State.STATE_TYPE_START:
            raise ParseError("该交接记录存在审批,不可修改")
        serializer.save()
@action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=HandoverSerializer)
|
|
@transaction.atomic
|
|
def create_and_submit(self, request, *args, **kwargs):
|
|
user = request.user
|
|
sr = HandoverSerializer(data=request.data, context={"request": request})
|
|
sr.is_valid(raise_exception=True)
|
|
ins = sr.save()
|
|
if ins.type != Handover.H_SCRAP:
|
|
if ins.recive_user is None or user == ins.recive_user or user.belong_dept == ins.recive_user.belong_dept:
|
|
pass
|
|
else:
|
|
raise ParseError("非接收人不可提交")
|
|
if ins.submit_time is None:
|
|
handover_submit(ins, user, None)
|
|
return Response({"id": ins.id})
|
|
|
|
@action(methods=["post"], detail=True, perms_map={"post": "handover.submit"}, serializer_class=Serializer)
|
|
@transaction.atomic
|
|
def submit(self, request, *args, **kwargs):
|
|
"""交接记录提交(变动车间库存)
|
|
|
|
交接记录提交
|
|
"""
|
|
ins: Handover = self.get_object()
|
|
user: User = self.request.user
|
|
if ins.type != Handover.H_SCRAP:
|
|
if ins.recive_user is None or user == ins.recive_user or user.belong_dept == ins.recive_user.belong_dept:
|
|
pass
|
|
else:
|
|
raise ParseError("非接收人不可提交")
|
|
ticket: Ticket = ins.ticket
|
|
if ticket and ticket.state.type != State.STATE_TYPE_END:
|
|
raise ParseError("该交接记录审批未完成,不可提交")
|
|
if ins.submit_time is None:
|
|
handover_submit(ins, user, None)
|
|
return Response()
|
|
|
|
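
    # A handover that merged stock into a new batch can only be reverted once every other
    # handover record of that merged batch has been submitted.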
@action(methods=["post"], detail=True, perms_map={"post": "handover.submit"}, serializer_class=Serializer)
|
|
@transaction.atomic
|
|
def revert(self, request, *args, **kwargs):
|
|
"""交接记录撤回(变动车间库存)
|
|
|
|
交接记录撤回
|
|
"""
|
|
ins: Handover = self.get_object()
|
|
if ins.new_batch: # 如果是合批
|
|
if Handoverb.objects.filter(batch=ins.new_batch, handover__submit_time__isnull=True).exists():
|
|
raise ParseError("该合批存在未提交的交接记录,不可撤回")
|
|
if ins.submit_time:
|
|
handover_revert(ins, handler=request.user)
|
|
return Response()
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=HandoverMgroupSerializer)
|
|
def mgroups(self, request, *args, **kwargs):
|
|
"""获取可交接到的工段
|
|
|
|
获取可交接到的工段
|
|
"""
|
|
sr = HandoverMgroupSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
materialInId = vdata["material"]
|
|
type = vdata["type"]
|
|
m_qs = Mgroup.objects.all()
|
|
if type in [Handover.H_NORMAL, Handover.H_CHANGE]:
|
|
m_qs = m_qs.filter(process__route_p__routepack__state=RoutePack.RP_S_CONFIRM)
|
|
m_qs = m_qs.filter(process__route_p__material_in__id=materialInId) | m_qs.filter(process__route_p__routemat_route__material__id=materialInId)
|
|
elif type in [Handover.H_SCRAP]:
|
|
m_qs = m_qs.filter(process=None)
|
|
return Response(list(m_qs.values("id", "name").distinct()))
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=GenHandoverWmSerializer)
|
|
@transaction.atomic
|
|
def gen_by_wm(self, request):
|
|
"""从车间库存生成交接记录(废弃)
|
|
|
|
从车间库存生成交接记录
|
|
"""
|
|
sr = GenHandoverWmSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
user = request.user
|
|
send_date, send_mgroup, send_user, recive_dept, recive_user, wm, count = (
|
|
vdata["send_date"],
|
|
vdata["send_mgroup"],
|
|
vdata["send_user"],
|
|
vdata["recive_dept"],
|
|
vdata["recive_user"],
|
|
vdata["wm"],
|
|
vdata["count"],
|
|
)
|
|
if send_mgroup.belong_dept != wm.belong_dept:
|
|
raise ParseError("送料工段错误!")
|
|
handover = Handover.objects.create(
|
|
send_date=send_date,
|
|
send_user=send_user,
|
|
recive_dept=recive_dept,
|
|
recive_user=recive_user,
|
|
send_mgroup=send_mgroup,
|
|
send_dept=wm.belong_dept,
|
|
batch=wm.batch,
|
|
material=wm.material,
|
|
count=count,
|
|
wm=wm,
|
|
create_by=user,
|
|
)
|
|
return Response({"handover": handover.id})
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "handover.create"}, serializer_class=GenHandoverSerializer)
|
|
@transaction.atomic
|
|
def gen_by_mlog(self, request):
|
|
"""从生产日志生成交接记录(废弃)
|
|
|
|
从生产日志生成交接记录
|
|
"""
|
|
sr = GenHandoverSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
vdata = sr.validated_data
|
|
user = request.user
|
|
send_date, send_user, recive_dept, recive_user = vdata["send_date"], vdata["send_user"], vdata["recive_dept"], vdata["recive_user"]
|
|
for mlog in vdata["mlogs"]:
|
|
Handover.objects.create(
|
|
send_date=send_date,
|
|
send_user=send_user,
|
|
recive_dept=recive_dept,
|
|
recive_user=recive_user,
|
|
send_dept=mlog.mgroup.belong_dept,
|
|
batch=mlog.batch,
|
|
material=mlog.material_out,
|
|
count=mlog.count_real,
|
|
count_eweight=mlog.count_real_eweight,
|
|
mlog=mlog,
|
|
send_mgroup=mlog.mgroup,
|
|
create_by=user,
|
|
)
|
|
return Response()
|
|
|
|
|
|


class AttlogViewSet(CustomModelViewSet):
    """
    list: Workshop attendance

    Workshop attendance
    """

    queryset = AttLog.objects.all()
    serializer_class = AttLogSerializer
    select_related_fields = ["user", "post", "sflog"]
    filterset_fields = ["sflog__mgroup", "sflog__mgroup__belong_dept__name", "sflog__work_date", "sflog__mgroup__cate", "sflog__mgroup__need_enm"]
    ordering = ["-sflog__work_date", "create_time"]


class OtherLogViewSet(CustomModelViewSet):
    """
    list: Other production records

    Other production records
    """

    queryset = OtherLog.objects.all()
    serializer_class = OtherLogSerializer
    filterset_fields = {"product": ["exact"], "handle_date": ["exact", "gte", "lte"]}
    search_fields = ["product"]
    ordering = ["-handle_date", "-create_time"]


class MlogbViewSet(CustomListModelMixin, CustomGenericViewSet):
    perms_map = {"get": "*"}
    queryset = Mlogb.objects.all()
    serializer_class = MlogbDetailSerializer
    select_related_fields = ["material_out", "material_in", "test_user"]
    filterset_class = MlogbFilter
    ordering = ["create_time"]


class MlogbInViewSet(BulkCreateModelMixin, BulkUpdateModelMixin, BulkDestroyModelMixin, CustomGenericViewSet):
    perms_map = {"post": "mlog.update", "delete": "mlog.update", "put": "mlog.update"}
    queryset = Mlogb.objects.filter(material_in__isnull=False)
    serializer_class = MlogbInSerializer
    update_serializer_class = MlogbInUpdateSerializer

    def perform_destroy(self, instance):
        ins: Mlogb = instance
        mlog = MlogViewSet.lock_and_check_can_update(ins.mlog)
        ins.delete()
        mlog.cal_mlog_count_from_mlogb()

    def perform_update(self, serializer):
        ins: Mlogb = serializer.instance
        mlog = MlogViewSet.lock_and_check_can_update(ins.mlog)
        ins: Mlogb = serializer.save()
        mlog.cal_mlog_count_from_mlogb()
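
    # Generates the output side of a consumption record. Roughly (editor's summary, see the
    # branches below): the output Mlogb is derived from the consumed input according to the
    # process type (normal / split / merge) and to whether the input and output materials are
    # tracked per batch or per piece; rework (is_fix) keeps the same material on both sides.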
    @classmethod
    def p_create_after(cls, mlogbin: Mlogb, wprIds: list = []):
        mlogbin_parent: Mlogb = mlogbin.parent
        mlog: Mlog = mlogbin.mlog
        mgroup: Mgroup = mlog.mgroup
        route: Route = mlogbin.route
        is_fix = mlog.is_fix
        if route is None and is_fix is False:
            raise ParseError("消耗物料缺失工艺步骤")
        process: Process = mgroup.process if route else None
        mtype = process.mtype if process else None
        # qct = mlog.qct
        # and the corresponding mlogbw records
        material_in: Material = mlogbin.material_in
        # For rework, the output material is the same as the input material
        material_out: Material = material_in if is_fix else route.material_out
        if material_out is None:
            raise ParseError("产物不可为空")
        if route and route.material_in != material_in:
            raise ParseError("工艺步骤输入与实际输入不符")
        # Output only needs to be generated for the main input material of the main batch
        if mlogbin_parent is not None:
            if mtype and mtype == Process.PRO_MERGE:
                pass
            else:
                return
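
        # For piece-tracked inputs, attach the consumed pieces (Wpr) as Mlogbw rows: either the
        # explicitly selected wprIds (which must all belong to wm_in and not already be used by
        # an unsubmitted log), or, when the counts match, every free piece of the batch.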
        wm_in: WMaterial = mlogbin.wm_in

        if material_in.tracking == Material.MA_TRACKING_SINGLE:  # auto-create mlogbw records
            if wprIds:
                wprs_can_use_qs = Wpr.objects.filter(id__in=wprIds).exclude(id__in=Mlogbw.objects.filter(wpr__wm__id=wm_in, mlogb__mlog__submit_time__isnull=True).values_list("wpr__id", flat=True)).order_by("number")
                if not wprs_can_use_qs.exists():
                    raise ParseError("当前产品都不可使用")
                wm_ids = list(wprs_can_use_qs.order_by().values_list("wm__id", flat=True).distinct())  # order_by() clears the ordering so that "number" is not pulled into the distinct
                if len(wm_ids) == 1 and wm_ids[0] == wm_in.id:
                    pass
                else:
                    raise ParseError("单个产品列表不属于当前批次")
                for wpr in wprs_can_use_qs:
                    Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number})
                mlogbin.count_use = Mlogbw.objects.filter(mlogb=mlogbin).count()
                mlogbin.save(update_fields=["count_use"])
            else:
                wprs_can_use_qs = Wpr.objects.filter(wm=wm_in).exclude(id__in=Mlogbw.objects.filter(wpr__wm__id=wm_in, mlogb__mlog__submit_time__isnull=True).values_list("wpr__id", flat=True)).order_by("number")
                if wprs_can_use_qs.count() == mlogbin.count_use:
                    for wpr in wprs_can_use_qs:
                        Mlogbw.objects.get_or_create(wpr=wpr, mlogb=mlogbin, defaults={"number": wpr.number})

        # if qct is None:
        #     mlog.qct = Qct.get(material_out, "process")
        #     mlog.save(update_fields = ["qct"])
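
        # Build the template for the output Mlogb from the main input (or from its parent for
        # secondary batches); a new batch number is generated unless this is a split of a
        # piece-tracked input, in which case the piece number later becomes the batch.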
        if mlogbin_parent is None:
            m_dict = {
                "mtask": mlogbin.mtask,
                "route": route,
                "mlog": mlog,
                "material_out": material_out,
                "batch": mlogbin.batch,
                "batch_ofrom": wm_in.batch_ofrom,
                "material_ofrom": wm_in.material_ofrom,
                "qct": Qct.get(material_out, "process", "out"),
            }
            if mtype == Process.PRO_DIV and material_in.tracking == Material.MA_TRACKING_SINGLE:
                pass
            else:
                m_dict["batch"] = generate_new_batch(mlogbin.batch, mlog)
        else:
            m_dict = {
                "mtask": mlogbin_parent.mtask,
                "route": route,
                "mlog": mlog,
                "material_out": material_out,
                "batch": mlogbin_parent.batch,
                "batch_ofrom": mlogbin_parent.wm_in.batch_ofrom,
                "material_ofrom": mlogbin_parent.wm_in.material_ofrom,
                "qct": Qct.get(material_out, "process", "out"),
            }
            if mtype == Process.PRO_DIV and material_in.tracking == Material.MA_TRACKING_SINGLE:
                pass
            else:
                m_dict["batch"] = generate_new_batch(mlogbin_parent.batch, mlog)

        if is_fix:  # supports batch-to-batch and piece-to-piece
            d_count_real = mlogbin.count_use - mlogbin.count_pn_jgqbl
            d_count_ok = d_count_real
            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok}))
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin})
        elif mtype == Process.PRO_NORMAL:  # normal: supports batch-to-batch and piece-to-piece
            d_count_real = mlogbin.count_use - mlogbin.count_pn_jgqbl
            d_count_ok = d_count_real
            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok}))
            mlogbout.count_json_from = mlogbin.count_json_from
            mlogbout.save(update_fields=["count_json_from"])
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin})
        elif mtype == Process.PRO_DIV:  # split: supports batch-to-batch, piece-to-piece and piece-to-batch
            div_number = route.div_number
            if material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_BATCH:
                lenx = Mlogbw.objects.filter(mlogb=mlogbin).count()
                # Using the piece number as the batch number lets a reused piece number be traced back to the original piece
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    if process and process.number_to_batch:
                        m_dict["batch"] = mlogbwin.number
                    mlogbout, _ = Mlogb.objects.get_or_create(mlogbw_from=mlogbwin, defaults=update_dict(m_dict, {"count_real": div_number, "count_ok": div_number, "count_ok_full": div_number}))
                    if lenx == 1:
                        mlogbout.mlogb_from = mlogbin
                    mlogbout.number_from = mlogbwin.number
                    mlogbout.save()
            elif material_in.tracking == Material.MA_TRACKING_SINGLE and material_out.tracking == Material.MA_TRACKING_SINGLE:
                mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=m_dict)
                d_count_real = 0
                for mlogbwin in Mlogbw.objects.filter(mlogb=mlogbin).order_by("number"):
                    wpr_ = mlogbwin.wpr
                    for key, val in wpr_.oinfo.items():
                        if val["name"] == "切片数":
                            div_number = int(val["val"])
                    d_count_real = d_count_real + div_number
                    if div_number == 1:
                        Mlogbw.objects.get_or_create(wpr=wpr_, mlogb=mlogbout, defaults={"number": wpr_.number, "mlogbw_from": mlogbwin})
                    else:
                        for i in range(div_number):
                            Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f"{wpr_.number}-{i+1}", defaults={"mlogbw_from": mlogbwin})
                d_count_ok = d_count_real
                mlogbout.count_real = d_count_real
                mlogbout.count_ok = d_count_ok
                mlogbout.count_ok_full = d_count_ok
                mlogbout.save(update_fields=["count_real", "count_ok", "count_ok_full"])
            elif material_in.tracking == Material.MA_TRACKING_BATCH and material_out.tracking == Material.MA_TRACKING_BATCH:
                d_count_real = (mlogbin.count_use - mlogbin.count_pn_jgqbl) * div_number
                d_count_ok = d_count_real
                mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok}))
                mlogbout.count_json_from = mlogbin.count_json_from
                mlogbout.save(update_fields=["count_json_from"])
        elif mtype == Process.PRO_MERGE:  # merge: supports batch-to-batch and batch-to-piece
            div_number = route.div_number
            if mlogbin_parent is not None:
                # This is a secondary batch
                if mlogbin.material_in == mlogbin_parent.material_in:
                    # If the main and secondary input materials match, aggregate their counts
                    count_use_sum = Mlogb.objects.filter(Q(id=mlogbin_parent.id) | Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum("count_use"))["count_use__sum"] or 0
                    count_pn_jgqbl_sum = (
                        Mlogb.objects.filter(Q(id=mlogbin_parent.id) | Q(parent=mlogbin_parent), material_in=mlogbin.material_in).aggregate(Sum("count_pn_jgqbl"))["count_pn_jgqbl__sum"] or 0
                    )
                    xcount = math.floor((count_use_sum - count_pn_jgqbl_sum) / div_number)
                else:
                    # Otherwise the material must be a valid auxiliary material of the route
                    if not RouteMat.objects.filter(material=mlogbin.material_in, route=route).exists():
                        raise ParseError("工艺步骤中不存在该辅料")
                    # Use the main batch for subsequent references
                    mlogbin = mlogbin_parent
            else:
                xcount = math.floor((mlogbin.count_use - mlogbin.count_pn_jgqbl) / div_number)
            if xcount > 0:
                d_count_real = xcount
                d_count_ok = xcount
                number_to_batch = process.number_to_batch
                if not number_to_batch:
                    mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": d_count_real, "count_ok": d_count_ok, "count_ok_full": d_count_ok}))
                    mlogbout.count_json_from = mlogbin.count_json_from
                    mlogbout.save(update_fields=["count_json_from"])
                wpr_number_rule = process.wpr_number_rule
                if material_out.tracking == Material.MA_TRACKING_SINGLE:
                    number = mlogbin.batch
                    if mlogbin.number_from:
                        number = mlogbin.number_from
                    if d_count_real == 1:
                        if wpr_number_rule:
                            number = MlogbInViewSet.gen_number_with_rule(wpr_number_rule, material_out, mlog)
                        if number_to_batch:
                            mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": 1, "count_ok": 1, "count_ok_full": 1, "batch": number}))
                        Mlogbw.objects.get_or_create(number=number, mlogb=mlogbout)
                    else:
                        if wpr_number_rule:
                            number_list = MlogbInViewSet.gen_number_with_rule(wpr_number_rule, material_out, mlog, gen_count=d_count_real)
                        for i in range(d_count_real):
                            if wpr_number_rule:
                                numberx = number_list[i]
                            else:
                                numberx = f"{number}-{i+1}"
                            if number_to_batch:
                                mlogbout, _ = Mlogb.objects.get_or_create(mlogb_from=mlogbin, defaults=update_dict(m_dict, {"count_real": 1, "count_ok": 1, "count_ok_full": 1, "batch": numberx}))
                            Mlogbw.objects.get_or_create(number=numberx, mlogb=mlogbout)
        else:
            raise ParseError("不支持生成产出物料!")
        mlog.cal_mlog_count_from_mlogb()

    def perform_create(self, serializer):
        vdata = serializer.validated_data
        mlogbin: Mlogb = serializer.save()
        MlogViewSet.lock_and_check_can_update(mlogbin.mlog)
        MlogbInViewSet.p_create_after(mlogbin, wprIds=vdata.get("wprIds", []))
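
    # Piece numbers are produced from a per-process format string (process.wpr_number_rule)
    # with the placeholders c_year, c_year2, c_month, m_model and n_count used below; a rule
    # such as "{m_model}{c_year2}{c_month:02d}{n_count:04d}" (hypothetical example) would give
    # model + 2-digit year + month + a running 4-digit counter, continued from the last number
    # found in submitted logs of the same process and month.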
    @classmethod
    def gen_number_with_rule(cls, rule, material_out: Material, mlog: Mlog, gen_count=1):
        from apps.wpmw.models import Wpr

        handle_date = mlog.handle_date
        c_year = handle_date.year
        c_year2 = str(c_year)[-2:]
        c_month = handle_date.month
        m_model = material_out.model
        # Look up the most recent number via production logs
        wpr = (
            Wpr.objects.filter(
                wpr_mlogbw__mlogb__material_out__isnull=False,
                wpr_mlogbw__mlogb__mlog__mgroup__process=mlog.mgroup.process,
                wpr_mlogbw__mlogb__mlog__is_fix=False,
                wpr_mlogbw__mlogb__mlog__submit_time__isnull=False,
                wpr_mlogbw__mlogb__mlog__handle_date__year=c_year,
                wpr_mlogbw__mlogb__mlog__handle_date__month=c_month,
            )
            .order_by("number")
            .last()
        )
        cq_w = 4
        if "n_count:02d" in rule:
            cq_w = 2
        n_count = 0
        if wpr:
            try:
                n_count = int(wpr.number[-cq_w:].lstrip("0"))
            except Exception as e:
                raise ParseError(f"获取该类产品最后编号错误: {str(e)}")
        try:
            if gen_count == 1:
                return rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count + 1, c_year2=c_year2)
            else:
                return [rule.format(c_year=c_year, c_month=c_month, m_model=m_model, n_count=n_count + i + 1, c_year2=c_year2) for i in range(gen_count)]
        except Exception as e:
            raise ParseError(f"个号生成错误: {e}")


class MlogbOutViewSet(BulkUpdateModelMixin, CustomGenericViewSet):
    perms_map = {"put": "mlog.update"}
    queryset = Mlogb.objects.filter(material_out__isnull=False)
    serializer_class = MlogbOutUpdateSerializer

    def perform_update(self, serializer):
        ins: Mlogb = serializer.instance
        mlog = MlogViewSet.lock_and_check_can_update(ins.mlog)
        material_out = serializer.validated_data.get("material_out")
        if material_out and material_out.tracking == Material.MA_TRACKING_SINGLE:
            raise ParseError("单件产品不支持直接修改")
        ins: Mlogb = serializer.save()
        mlog.cal_mlog_count_from_mlogb()


class FmlogViewSet(CustomModelViewSet):
    perms_map = {"get": "*", "post": "mlog.create", "put": "mlog.update", "delete": "mlog.delete"}
    queryset = Fmlog.objects.all()
    serializer_class = FmlogSerializer
    update_serializer_class = FmlogUpdateSerializer
    filterset_fields = ["mtask", "mgroup", "mtask__route", "enabled"]
    select_related_fields = ["mtask", "mgroup", "mtask__route", "mtask__route__routepack", "mtask__route__material_out"]
    search_fields = ["mlog_fmlog__b_mlog__batch"]

    def destroy(self, request, *args, **kwargs):
        ins = self.get_object()
        if Mlog.objects.filter(fmlog=ins).exists():
            raise ParseError("因存在二级日志不可删除")
        return super().destroy(request, *args, **kwargs)

    @action(methods=["post"], detail=True, perms_map={"post": "*"}, serializer_class=Serializer)
    def toggle_enabled(self, request, *args, **kwargs):
        ins: Fmlog = self.get_object()
        ins.enabled = not ins.enabled
        ins.save()
        return Response()


class BatchStViewSet(CustomListModelMixin, ComplexQueryMixin, CustomGenericViewSet):
    """
    list: Batch statistics

    Batch statistics
    """

    perms_map = {"get": "*"}
    queryset = BatchSt.objects.all()
    serializer_class = BatchStSerializer
    select_related_fields = ["material_start"]
    ordering_fields = ["batch", "last_time", "update_time"]
    ordering = ["batch"]
    filterset_class = BatchStFilter


class MlogbwViewSet(CustomModelViewSet):
    perms_map = {"get": "*", "post": "mlog.update", "put": "mlog.update", "delete": "mlog.update"}
    queryset = Mlogbw.objects.all()
    serializer_class = MlogbwCreateUpdateSerializer
    filterset_class = MlogbwFilter
    select_related_fields = ["ftest", "equip", "wpr", "mlogb"]
    ordering = ["mlogb", "number", "create_time"]
    ordering_fields = ["number", "create_time", "mlogb"]

    # def filter_queryset(self, queryset):
    #     if not self.detail and not self.request.query_params.get('mlogb', None):
    #         raise ParseError('请指定所属消耗/产出明细')
    #     return super().filter_queryset(queryset)
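
    # Creating an input Mlogbw cascades to the output side: when the output material is
    # piece-tracked (or the process is a split), the matching output Mlogb/Mlogbw rows are
    # created or updated, and the not-OK counters are recalculated.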
    def perform_create(self, serializer):
        ins: Mlogbw = serializer.save()
        mlog: Mlog = None
        if isinstance(ins, list):
            insx = ins
        else:
            insx = [ins]
        checked = False
        for ins in insx:
            if mlog is None:
                mlog = ins.mlogb.mlog
            else:
                if mlog != ins.mlogb.mlog:
                    raise ParseError("所有记录必须属于同一张日志")
            if not checked:
                MlogViewSet.lock_and_check_can_update(mlog)
                checked = True
            wpr: Wpr = ins.wpr
            mlogb: Mlogb = ins.mlogb
            if wpr.wm != mlogb.wm_in:
                raise ParseError("单个与所属批次不一致")
            route: Route = mlogb.route if mlogb.route else mlog.route
            Mlogbw.cal_count_notok(mlogb)
            # If this is an input and the output is tracked per piece, the output records need to be created in sync
            material_in: Material = mlogb.material_in
            if material_in is not None:
                mlogb_qs = Mlogb.objects.filter(mlogb_from=mlogb)
                if route:
                    material_out = route.material_out
                elif mlog.is_fix:
                    material_out = material_in
                else:
                    raise ParseError("获取产出物料错误")
                mtype = route.process.mtype if route.process else None
                if mlogb_qs.exists() and material_out.tracking == Material.MA_TRACKING_SINGLE:
                    for mlogb in mlogb_qs:
                        if route.process.mtype == Process.PRO_NORMAL:
                            Mlogbw.objects.get_or_create(mlogb=mlogb, wpr=ins.wpr, defaults={"number": ins.number, "mlogbw_from": ins})
                        elif route.process.mtype == Process.PRO_DIV:
                            if route.div_number == 1:
                                Mlogbw.objects.get_or_create(wpr=ins.wpr, mlogb=mlogb, defaults={"number": ins.wpr.number, "mlogbw_from": ins})
                            else:
                                for i in range(route.div_number):
                                    Mlogbw.objects.get_or_create(mlogb=mlogb, number=f"{ins.number}-{i+1}", defaults={"mlogbw_from": ins})
                        Mlogbw.cal_count_notok(mlogb)
                elif mtype == Process.PRO_DIV:
                    mlogbin = ins.mlogb
                    wm_in = mlogbin.wm_in
                    mlog = mlogbin.mlog
                    div_number = route.div_number
                    m_dict = {
                        "mtask": mlogbin.mtask,
                        "mlog": mlog,
                        "batch": ins.number,
                        "material_out": material_out,
                        "batch_ofrom": wm_in.batch_ofrom,
                        "material_ofrom": wm_in.material_ofrom,
                        "count_real": div_number,
                        "count_ok": div_number,
                        "qct": mlog.qct,
                    }
                    mlogbout, _ = Mlogb.objects.get_or_create(mlogbw_from=ins, defaults=m_dict)
                    if material_out.tracking == Material.MA_TRACKING_SINGLE:
                        if div_number == 1:
                            Mlogbw.objects.get_or_create(wpr=ins.wpr, mlogb=mlogbout, defaults={"number": ins.wpr.number, "mlogbw_from": ins})
                        else:
                            for i in range(div_number):
                                Mlogbw.objects.get_or_create(mlogb=mlogbout, number=f"{ins.number}-{i+1}", defaults={"mlogbw_from": ins})
                        Mlogbw.cal_count_notok(mlogbout)
                    elif material_out.tracking == Material.MA_TRACKING_BATCH:
                        number_from = mlogbout.number_from
                        if number_from is None:
                            mlogbout.number_from = ins.number
                            mlogbout.save()
                        elif number_from == ins.number:
                            pass
                        else:
                            raise ParseError("该个号不可产生该批")
        mlog.cal_mlog_count_from_mlogb()

    def perform_update(self, serializer):
        mlogbw = serializer.save()
        if isinstance(mlogbw, list):
            pass
        else:
            mlog = mlogbw.mlogb.mlog
            mlog = MlogViewSet.lock_and_check_can_update(mlog)
            Mlogbw.cal_count_notok(mlogbw.mlogb)

            mlog.cal_mlog_count_from_mlogb()

    def after_bulk_update(self, objs):
        mlogbIds = list(set([obj["mlogb"] for obj in objs]))
        for mlogbId in mlogbIds:
            mlogb = Mlogb.objects.get(id=mlogbId)
            mlog = MlogViewSet.lock_and_check_can_update(mlogb.mlog)
            Mlogbw.cal_count_notok(mlogb)
            mlog.cal_mlog_count_from_mlogb()

    def perform_destroy(self, instance: Mlogbw):
        mlogb: Mlogb = instance.mlogb
        mlog = MlogViewSet.lock_and_check_can_update(mlogb.mlog)
        if mlogb.material_out is not None and instance.wpr is not None:
            raise ParseError("不能删除该产出明细")

        # If this is an input and the output is tracked per piece, the output records need to be removed in sync
        material_in: Material = mlogb.material_in
        need_cal_mlogb = False
        if material_in is not None:
            mlogbw_qs = Mlogbw.objects.filter(mlogbw_from=instance)
            mlogbIds = list(mlogbw_qs.values_list("mlogb__id", flat=True))
            if mlogbIds:
                need_cal_mlogb = True

        ftest = instance.ftest
        instance.delete()
        if ftest:
            ftest.delete()
        Mlogbw.cal_count_notok(mlogb)

        if need_cal_mlogb:
            mlogb_qs = Mlogb.objects.filter(id__in=mlogbIds)
            for mlogb in mlogb_qs:
                Mlogbw.cal_count_notok(mlogb)

        mlog.cal_mlog_count_from_mlogb()
@action(methods=["post"], detail=False, perms_map={"post": "mlog.update"}, serializer_class=MlogbwStartTestSerializer)
|
|
@transaction.atomic
|
|
def start_test(self, request, *args, **kwargs):
|
|
sr = MlogbwStartTestSerializer(data=request.data)
|
|
sr.is_valid(raise_exception=True)
|
|
sr.save()
|
|
return Response()
|
|
|
|
|
|


class MlogUserViewSet(BulkCreateModelMixin, CustomListModelMixin, BulkDestroyModelMixin, CustomGenericViewSet):
    perms_map = {"get": "*", "post": "mlog.update", "delete": "mlog.update"}
    queryset = MlogUser.objects.all()
    serializer_class = MlogUserSerializer
    select_related_fields = ["handle_user", "shift", "process"]
    filterset_fields = ["mlog"]

    def get_queryset(self):
        qs = super().get_queryset()
        if self.request.method == "GET":
            qs = qs.filter(mlog__id=self.request.query_params.get("mlog", None))
        return qs

    def perform_destroy(self, instance):
        mlog: Mlog = instance.mlog
        MlogViewSet.lock_and_check_can_update(mlog)
        return super().perform_destroy(instance)


class BatchLogViewSet(CustomListModelMixin, CustomGenericViewSet):
    perms_map = {"get": "*"}
    queryset = BatchLog.objects.all()
    serializer_class = BatchLogSerializer
    select_related_fields = ["source", "target"]
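
    # Returns the batch genealogy as a DAG via services.get_batch_dag; the optional "method"
    # field (default "direct") is passed straight through to that service.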
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=Serializer)
|
|
@transaction.atomic
|
|
def dag(self, request):
|
|
"""
|
|
获取该批次的DAG图数据
|
|
|
|
获取该批次的DAG图数据
|
|
"""
|
|
batch = request.data.get("batch", None)
|
|
method = request.data.get("method", "direct")
|
|
if not batch:
|
|
raise ParseError("缺少batch参数")
|
|
return Response(get_batch_dag(batch, method))
|
|
|
|
@action(methods=["post"], detail=False, perms_map={"post": "*"}, serializer_class=Serializer)
|
|
def batches_to(self, request, *args, **kwargs):
|
|
"""获取已指向的批次号
|
|
|
|
获取已指向的批次号
|
|
"""
|
|
data = request.data
|
|
batch = data.get("batch", None)
|
|
if not batch:
|
|
raise ParseError("请指定批次号")
|
|
return Response(BatchLog.batches_to(batch=batch))
|