feat: refactor batchlog

caoqianming 2025-03-25 09:44:22 +08:00
parent b9a3427162
commit 6f28056ed4
7 changed files with 152 additions and 33 deletions

View File

@@ -240,7 +240,7 @@ class InmService:
BatchLog.clear(mio=instance)
else:
for item in MIOItem.objects.filter(mio=instance):
BatchSt.create(batch=item.batch, mio=instance, material_start=item.material)
BatchSt.g_create(batch=item.batch, mio=instance, material_start=item.material)
from apps.pum.services import PumService
cls.update_mb(instance, in_or_out)
PumService.mio_purin(instance, is_reverse)
@@ -249,7 +249,7 @@ class InmService:
BatchLog.clear(mio=instance)
else:
for item in MIOItem.objects.filter(mio=instance):
BatchSt.create(batch=item, mio=instance, material_start=item.material)
BatchSt.g_create(batch=item, mio=instance, material_start=item.material)
cls.update_mb(instance, in_or_out)
elif instance.type == MIO.MIO_TYPE_DO_IN:
mioitems = MIOItem.objects.filter(mio=instance)

View File

@@ -0,0 +1,34 @@
# Generated by Django 3.2.12 on 2025-03-25 01:21
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wpm', '0103_batchst_material_start'),
]
operations = [
migrations.RemoveField(
model_name='batchlog',
name='source_b',
),
migrations.RemoveField(
model_name='batchlog',
name='target_b',
),
migrations.AddField(
model_name='batchlog',
name='source',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='batch_s', to='wpm.batchst', verbose_name='来源批次'),
preserve_default=False,
),
migrations.AddField(
model_name='batchlog',
name='target',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='batch_t', to='wpm.batchst', verbose_name='目标批次'),
preserve_default=False,
),
]
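
Note that the operations above drop source_b/target_b before anything is copied into the new foreign keys, so any pre-existing BatchLog rows end up pointing at the placeholder default=1. If old relations needed to be preserved, a single migration could add the FKs as nullable, backfill them, and only then remove the text columns. A rough sketch under that assumption (this is not part of the commit; everything except the model and field names is a placeholder):

# Hypothetical alternative migration, for illustration only.
from django.db import migrations, models
import django.db.models.deletion

def backfill_batch_refs(apps, schema_editor):
    BatchSt = apps.get_model('wpm', 'BatchSt')
    BatchLog = apps.get_model('wpm', 'BatchLog')
    for log in BatchLog.objects.all():
        # The old text columns still exist at this point in the operation order.
        log.source, _ = BatchSt.objects.get_or_create(batch=log.source_b)
        log.target, _ = BatchSt.objects.get_or_create(batch=log.target_b)
        log.save(update_fields=['source', 'target'])

class Migration(migrations.Migration):
    dependencies = [('wpm', '0103_batchst_material_start')]
    operations = [
        migrations.AddField(
            model_name='batchlog',
            name='source',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='batch_s', to='wpm.batchst', verbose_name='来源批次'),
        ),
        migrations.AddField(
            model_name='batchlog',
            name='target',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='batch_t', to='wpm.batchst', verbose_name='目标批次'),
        ),
        migrations.RunPython(backfill_batch_refs, migrations.RunPython.noop),
        # An AlterField to null=False could follow here to match the final model.
        migrations.RemoveField(model_name='batchlog', name='source_b'),
        migrations.RemoveField(model_name='batchlog', name='target_b'),
    ]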

View File

@@ -593,33 +593,31 @@ class BatchSt(BaseModel):
mlog = models.ForeignKey(Mlog, verbose_name='由何日志创建', on_delete=models.CASCADE, null=True, blank=True)
@classmethod
def create(cls, batch:str, mio=None, handover=None, mlog=None, material_start=None):
def g_create(cls, batch:str, mio=None, handover=None, mlog=None, material_start=None):
"""
Create a new batch
"""
if BatchSt.objects.filter(batch=batch).exists():
raise ParseError(f"{batch} 该批号已存在不可用")
if mio is None and handover is None and mlog is None:
raise ParseError("mio or handover or mlog must be provided")
BatchSt.objects.create(batch=batch, mio=mio, handover=handover, mlog=mlog, material_start=material_start)
if mio is None and handover is None and mlog is None and material_start is None:
return cls.objects.get_or_create(batch=batch)
else:
if mio is None and handover is None and mlog is None:
raise ParseError("mio or handover or mlog must be provided")
ins = cls.objects.create(batch=batch, mio=mio, handover=handover, mlog=mlog, material_start=material_start)
return ins, True
class BatchLog(BaseModel):
"""
TN: batch split/merge change record
"""
# source = models.ForeignKey(BatchSt, verbose_name='来源批次', on_delete=models.CASCADE, related_name="batch_p")
# target = models.ForeignKey(BatchSt, verbose_name='目标批次', on_delete=models.CASCADE, related_name="batch_c")
source_b = models.TextField("来源批次", db_index=True)
target_b = models.TextField("目标批次", db_index=True)
source = models.ForeignKey(BatchSt, verbose_name='来源批次', on_delete=models.CASCADE, related_name="batch_s")
target = models.ForeignKey(BatchSt, verbose_name='目标批次', on_delete=models.CASCADE, related_name="batch_t")
handover = models.ForeignKey(Handover, verbose_name='关联交接记录', on_delete=models.CASCADE, null=True, blank=True)
mlog = models.ForeignKey(Mlog, verbose_name='关联生产记录', on_delete=models.CASCADE, null=True, blank=True)
relation_type = models.CharField('关联类型', max_length=20, help_text="split/merge", default="split")
@classmethod
def g_create(cls, source_b:str, target_b:str=None, relation_type="split", handover=None, mlog=None):
def g_create(cls, source:str, target:str=None, relation_type="split", handover=None, mlog=None):
"""
Create a new relation
"""
@@ -627,7 +625,7 @@ class BatchLog(BaseModel):
raise ParseError("relation_type must be split or merge")
if handover is None and mlog is None:
raise ParseError("handover or mlog must be provided")
cls.objects.get_or_create(source_b=source_b, target_b=target_b, relation_type=relation_type, handover=handover, mlog=mlog)
return cls.objects.get_or_create(source=source, target=target, relation_type=relation_type, handover=handover, mlog=mlog)
@classmethod
def clear(cls, handover=None, mlog=None, mio=None):
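
Taken together, the renamed helpers are meant to be chained: BatchSt.g_create returns an (instance, created) pair, falling back to get_or_create when only a batch number is supplied, and BatchLog.g_create now links two BatchSt instances instead of raw batch strings. A minimal usage sketch (mlog and the batch numbers below are made-up examples, not from the commit):

# Illustration only: `mlog` is an existing Mlog; batch numbers are invented.
target, _ = BatchSt.g_create(batch="B-2025-002", mlog=mlog)   # assumes the target batch number is new
source, _ = BatchSt.g_create(batch="B-2025-001")              # bare call falls back to get_or_create
link, created = BatchLog.g_create(source=source, target=target, relation_type="split", mlog=mlog)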

View File

@@ -6,7 +6,7 @@ from datetime import datetime
from .models import (SfLog, StLog, SfLogExp, WMaterial, Mlog,
Handover, Handoverb, Mlogb, AttLog,
OtherLog, Fmlog, BatchSt, Mlogbw, Handoverbw, MlogbDefect, MlogUser)
OtherLog, Fmlog, BatchSt, Mlogbw, Handoverbw, MlogbDefect, MlogUser, BatchLog)
from apps.system.models import Dept, User
from apps.system.serializers import UserSimpleSerializer
from apps.pm.models import Mtask, Mtaskb
@@ -1208,4 +1208,13 @@ class MlogUserSerializer(CustomModelSerializer):
raise ParseError("该日志没有工艺步骤")
if MlogUser.objects.filter(mlog=mlog, process=process).exists():
raise ParseError("该工序已选择")
return super().create(validated_data)
return super().create(validated_data)
class BatchLogSerializer(CustomModelSerializer):
source_batch = serializers.CharField(source='source.batch', read_only=True)
target_batch = serializers.CharField(source='target.batch', read_only=True)
class Meta:
model = BatchLog
fields = "__all__"
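
For reference, a BatchLog row serialized with this class should look roughly like the dict below (values are illustrative, and BaseModel/CustomModelSerializer may add further fields such as timestamps):

# Illustrative output only; ids and batch numbers are made up.
{
    "id": 12,
    "source": 3,                    # BatchSt primary key
    "target": 7,
    "source_batch": "B-2025-001",   # read-only, taken from source.batch
    "target_batch": "B-2025-002",   # read-only, taken from target.batch
    "relation_type": "split",
    "handover": None,
    "mlog": 45,
}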

View File

@@ -171,11 +171,13 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
m_outs = Mlogb.objects.filter(mlog=mlog, material_out__isnull=False)
for item in m_outs:
if item.mlogb_from and item.batch != item.mlogb_from.batch:
BatchSt.create(batch=item.batch, mlog=mlog, material_start=item.material_out)
BatchLog.g_create(source_b=item.mlogb_from.batch, target_b=item.batch, mlog=mlog)
target, _ = BatchSt.g_create(batch=item.batch, mlog=mlog, material_start=item.material_out)
source, _ = BatchSt.g_create(batch=item.mlogb_from.batch)
BatchLog.g_create(source=source, target=target, mlog=mlog)
if item.mlogbw_from and item.batch != item.mlogbw_from.mlogb.batch:
BatchSt.create(batch=item.batch, mlog=mlog, material_start=item.material_out)
BatchLog.g_create(source_b=item.mlogbw_from.mlogb.batch, target_b=item.batch, mlog=mlog)
target, _ = BatchSt.g_create(batch=item.batch, mlog=mlog, material_start=item.material_out)
source, _ = BatchSt.g_create(batch=item.mlogbw_from.mlogb.batch)
BatchLog.g_create(source=source, target=target, mlog=mlog)
if material_in or is_fix:  # shop-floor inventory management is needed
m_ins_list = []
@@ -718,13 +720,15 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
if mtype == Handover.H_MERGE:
batch = new_batch
if create_new_batch is False:
BatchSt.create(batch=batch, handover=handover, material_start=material)
target, _ = BatchSt.g_create(batch=batch, handover=handover, material_start=material)
create_new_batch = True
BatchLog.g_create(source_b=wm_from.batch, target_b=batch, handover=handover, relation_type="merge")
source, _ = BatchSt.g_create(batch=wm_from.batch)
BatchLog.g_create(source=source, target=target, handover=handover, relation_type="merge")
elif mtype == Handover.H_DIV:
batch = handover_or_b.batch
BatchSt.create(batch=batch, handover=handover, material_start=material)
BatchLog.g_create(source_b=handover.wm.batch, target_b=batch, handover=handover, relation_type="split")
target, _ = BatchSt.g_create(batch=batch, handover=handover, material_start=material)
source, _ = BatchSt.g_create(batch=handover.wm.batch)
BatchLog.g_create(source=source, target=target, handover=handover, relation_type="split")
else:
batch = wm_from.batch
batches.append(batch)
@@ -897,3 +901,55 @@ def mlog_audit_end(ticket: Ticket):
ins = Mlog.objects.get(id=ticket.ticket_data['t_id'])
mlog_submit(ins, ticket.create_by, now)
def get_batch_dag(batch_number: str):
try:
batch_ins = BatchSt.objects.get(batch=batch_number)
except Exception:
raise ParseError("该批次号未构建关系链")
# Collect all related batches and edges
nodes_set = {batch_ins.id}
edges = []
prev_size = 0
while len(nodes_set) > prev_size:
prev_size = len(nodes_set)
# Find all records where the collected batches appear as source or target
logs = BatchLog.objects.filter(Q(source_id__in=nodes_set) | Q(target_id__in=nodes_set)).order_by("create_time")
# Process each record, extending the node and edge sets
for log in logs:
source = log.source.id
target = log.target.id
nodes_set.update([source, target])
edges.append({
'source': source,
'target': target,
'label': log.relation_type,  # use relation_type as the edge label
})
# De-duplicate the edges
unique_edges = {}
for edge in edges:
key = (edge['source'], edge['target'])
if key not in unique_edges:
unique_edges[key] = edge
# Sort the related batch records
nodes_qs = BatchSt.objects.filter(id__in=nodes_set).order_by('id')
# batch_to_id = {batch: idx for idx, batch in enumerate(nodes_list)}
# Build the node data, using the 'rect' shape by default
nodes = [{
'id': item.id,
'label': item.batch,
'shape': 'rect'  # shape can be adjusted to business needs
} for item in nodes_qs]
# Build the edge data
edges_converted = [{
'source': edge['source'],
'target': edge['target'],
'label': edge['label']
} for edge in unique_edges.values()]
return {'nodes': nodes, 'edges': edges_converted}
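
For a single recorded split between two batches, the function would return something shaped like this (ids and batch numbers are invented for illustration):

# Illustrative return value only.
{
    'nodes': [
        {'id': 3, 'label': 'B-2025-001', 'shape': 'rect'},
        {'id': 7, 'label': 'B-2025-002', 'shape': 'rect'},
    ],
    'edges': [
        {'source': 3, 'target': 7, 'label': 'split'},
    ],
}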

View File

@@ -5,7 +5,7 @@ from apps.wpm.views import (SfLogViewSet, StLogViewSet, SfLogExpViewSet,
WMaterialViewSet, MlogViewSet, HandoverViewSet,
AttlogViewSet, OtherLogViewSet, MlogbViewSet, MlogbInViewSet,
MlogbOutViewSet, FmlogViewSet, BatchStViewSet,
MlogbwViewSet, MlogUserViewSet)
MlogbwViewSet, MlogUserViewSet, BatchLogViewSet)
from apps.wpm.datax import AnaViewSet
@@ -29,6 +29,7 @@ router.register('ana', AnaViewSet, basename='ana')
router.register('batchst', BatchStViewSet, basename='batchst')
router.register('mlogbw', MlogbwViewSet, basename='mlogbw')
router.register('mloguser', MlogUserViewSet, basename='mloguser')
router.register('batchlog', BatchLogViewSet, basename='batchlog')
urlpatterns = [
path(API_BASE_URL, include(router.urls)),
]

View File

@@ -16,7 +16,7 @@ from apps.utils.mixins import CustomListModelMixin, BulkCreateModelMixin, BulkDe
from .filters import StLogFilter, SfLogFilter, WMaterialFilter, MlogFilter, HandoverFilter, MlogbFilter, BatchStFilter
from .models import (SfLog, SfLogExp, StLog, WMaterial, Mlog, Handover, Mlogb,
Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect, MlogUser)
Mlogbw, AttLog, OtherLog, Fmlog, BatchSt, MlogbDefect, MlogUser, BatchLog)
from .serializers import (SflogExpSerializer, SfLogSerializer, StLogSerializer, WMaterialSerializer,
MlogRevertSerializer,
MlogSerializer, MlogRelatedSerializer, DeptBatchSerializer, HandoverSerializer,
@@ -26,8 +26,8 @@ from .serializers import (SflogExpSerializer, SfLogSerializer, StLogSerializer,
MlogbDetailSerializer, MlogbInSerializer, MlogbInUpdateSerializer,
MlogbOutUpdateSerializer, FmlogSerializer, FmlogUpdateSerializer, BatchStSerializer,
MlogbwCreateUpdateSerializer, HandoverMgroupSerializer, MlogListSerializer,
MlogbSerializer, MlogUserSerializer)
from .services import mlog_submit, handover_submit, mlog_revert
MlogbSerializer, MlogUserSerializer, BatchLogSerializer)
from .services import mlog_submit, handover_submit, mlog_revert, get_batch_dag
from apps.wpm.services import mlog_submit_validate, generate_new_batch
from apps.wf.models import State
from apps.wpmw.models import Wpr
@@ -786,13 +786,34 @@ class MlogUserViewSet(BulkCreateModelMixin, ListModelMixin, DestroyModelMixin, C
filterset_fields = ["mlog"]
def get_queryset(self):
qs = super().get_queryset()
mlog = self.request.query_params.get("mlog", None)
if not mlog:
raise ParseError("缺少mlog查询参数")
return super().get_queryset()
return qs.none()
return qs
def perform_destroy(self, instance):
mlog:Mlog = instance.mlog
if mlog.submit_time is not None:
raise ParseError("不能删除该记录")
return super().perform_destroy(instance)
return super().perform_destroy(instance)
class BatchLogViewSet(ListModelMixin, CustomGenericViewSet):
perms_map = {"get": "*"}
queryset = BatchLog.objects.all()
serializer_class = BatchLogSerializer
select_related_fields = ["source", "target"]
@action(methods=['post'], detail=False, perms_map={'post': '*'}, serializer_class=Serializer)
@transaction.atomic
def dag(self, request):
"""
Get the DAG graph data for this batch
"""
batch = request.data.get("batch", None)
if not batch:
raise ParseError("缺少batch参数")
return Response(get_batch_dag(batch))
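
With the batchlog route registered above, the dag action is reachable under the wpm API prefix. A rough request sketch using DRF's test client (the URL prefix depends on API_BASE_URL, and the batch number and authentication step are assumptions):

# Sketch only; adjust the prefix and authentication to the project's setup.
from rest_framework.test import APIClient

client = APIClient()
# client.force_authenticate(user=some_user)  # if the endpoint is not open
resp = client.post('/api/wpm/batchlog/dag/', {'batch': 'B-2025-001'}, format='json')
print(resp.status_code)  # 200 with {'nodes': [...], 'edges': [...]}, 400 if batch is missing or unknown
print(resp.json())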