# factory/apps/wpm/services.py
import datetime
import logging
from datetime import timedelta
from typing import Union

from django.core.cache import cache
from django.db.models import Count, Q, Sum
from django.utils import timezone
from rest_framework.exceptions import ParseError

from apps.system.models import User
from apps.pm.models import Mtask
from apps.mtm.models import Mgroup, Shift, Material, Route, RoutePack, Team, Srule, Process
from .models import SfLog, WMaterial, Mlog, Mlogb, Mlogbw, Handover, Handoverb, Handoverbw, MlogbDefect, BatchLog, BatchSt
from apps.mtm.services_2 import cal_material_count
from apps.wf.models import Ticket
from apps.utils.thread import MyThread
from apps.wpm.services_2 import get_alldata_with_batch_and_store
from apps.wpmw.models import Wpr, WprDefect
from ..qm.models import Defect, Ftest
from apps.utils.tasks import ctask_run

myLogger = logging.getLogger('log')
def generate_new_batch(old_batch: str, mlog: Mlog):
new_batch = old_batch
supplier = mlog.supplier
mgroup = mlog.mgroup
process = mgroup.process
if mgroup.batch_append_code:
if mgroup.code:
new_batch = f'{new_batch}>{mgroup.code}'
if mlog.mtype == Mlog.MTYPE_OUT:
supplier_number = supplier.number if supplier else ''
if supplier_number:
new_batch = f'{new_batch}>{supplier_number}'
elif process.batch_append_equip:
number = mlog.equipment.number if mlog.equipment else ''
if number:
new_batch = f'{new_batch}>{number}'
if mlog.index:
new_batch = f'{new_batch}>{mlog.index}'
return new_batch
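# Illustrative usage sketch (comment only, not part of the original module): generate_new_batch()
# appends the mgroup code, the supplier or equipment number, and the mlog index to a batch string.
# Assuming an outbound mlog whose mgroup has batch_append_code set with code 'CX', whose supplier
# number is 'S01' and whose index is 2 (all placeholder values), the call below could yield
# something like 'B240101>CX>S01>2'. `some_mlog` is a placeholder Mlog instance.
#
#   new_batch = generate_new_batch('B240101', some_mlog)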
def find_material_can_change(material: Material, mgroup_to: Mgroup):
"""
Find the material this one can be converted into (used for rework handover).
"""
routepackIds = Route.objects.filter(material_out=material, routepack__state=RoutePack.RP_S_CONFIRM).values_list('routepack', flat=True)
matIds = list(Route.objects.filter(routepack__id__in=routepackIds, process=mgroup_to.process, material_in__type__in=[Material.MA_TYPE_HALFGOOD, Material.MA_TYPE_GOOD]).values_list('material_in', flat=True).distinct())
if matIds and len(matIds) == 1:
return Material.objects.get(id=matIds[0])
raise ParseError("无法返工到该工段")
def get_sflog(mgroup: Mgroup, happen_time: datetime.datetime):
sflog = SfLog.objects.filter(
start_time__lte=happen_time, end_time__gt=happen_time, mgroup=mgroup).order_by('-start_time').first()
if sflog is None: # no shift log exists yet, create the surrounding shift logs first
make_sflogs(mgroup=mgroup, start_date=(
happen_time-datetime.timedelta(days=2)).date(), end_date=(happen_time+datetime.timedelta(days=1)).date())
sflog = SfLog.objects.filter(
start_time__lte=happen_time, end_time__gt=happen_time, mgroup=mgroup).order_by('-start_time').first()
return sflog
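# Illustrative usage sketch (comment only): look up the shift log that covers a timestamp,
# creating the surrounding shift logs on demand when none exists yet.
# `some_mgroup` is a placeholder Mgroup instance, not an object defined in this module.
#
#   sflog = get_sflog(some_mgroup, timezone.now())
#   if sflog:
#       print(sflog.shift, sflog.start_time, sflog.end_time)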
def get_team_x(sflog: SfLog):
srule = Srule.objects.filter(belong_dept=sflog.mgroup.belong_dept).first()
if srule and srule.rule:
rule = srule.rule
rule_compare = rule + rule
teamId_list = (SfLog.objects.filter(mgroup=sflog.mgroup, start_time__lt=sflog.start_time, start_time__gte=sflog.start_time - timedelta(days=5)).order_by('-start_time').values_list('team_id', flat=True)[:3])[::-1]
if None not in teamId_list:
teamId_list_len = len(teamId_list)
for i in range(len(rule_compare)-teamId_list_len+1):
if rule_compare[i:i+teamId_list_len] == teamId_list:
# wrap with modulo so a match at the very end of the doubled rule cannot index past it
teamId = rule_compare[(i+teamId_list_len) % len(rule_compare)]
team = Team.objects.filter(id=teamId).first()
return team
return None
def make_sflogs(mgroup: Mgroup, start_date: datetime.date, end_date: datetime.date, create_by=None):
shift_rule = mgroup.shift_rule
shifts = Shift.objects.filter(rule=shift_rule).order_by('sort') # build shift-log records according to the scheduling rule
current_date = start_date
while current_date <= end_date:
for shift in shifts:
start_time_o = shift.start_time_o
end_time_o = shift.end_time_o
start_time = datetime.datetime.combine(current_date, start_time_o)
end_time = datetime.datetime.combine(current_date, end_time_o)
# Handle shifts that cross midnight so the current day's shift record is not missed
if start_time > end_time:
end_time += datetime.timedelta(days=1)
total_sec = (end_time - start_time).total_seconds()
# Create the SfLog record
sflog, is_created = SfLog.objects.get_or_create(mgroup=mgroup, shift=shift, start_time=start_time, defaults={
"mgroup": mgroup,
"shift": shift,
"work_date": start_time.date(),
"start_time": start_time,
"end_time": end_time,
"total_sec_now": total_sec,
"total_sec": total_sec,
"create_by": create_by
})
if is_created:
team = get_team_x(sflog)
if team:
sflog.team = team
sflog.leader = team.leader
sflog.save()
current_date = current_date + datetime.timedelta(days=1)
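# Illustrative usage sketch (comment only): pre-generate a week of shift logs for a work section;
# get_or_create keeps the call idempotent, so re-running it does not duplicate records.
# `some_mgroup` and `admin_user` are placeholders, not objects defined in this module.
#
#   today = timezone.now().date()
#   make_sflogs(some_mgroup, start_date=today,
#               end_date=today + datetime.timedelta(days=6), create_by=admin_user)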
def get_pcoal_heat(year_s: int, month_s: int, day_s: int):
"""
Get the pulverized-coal calorific value.
Only the rotary kiln (回转窑) requires this value to be entered.
"""
key = f'pcoal_heat_{year_s}_{month_s}_{day_s}'
pcoal_heat = cache.get(key)
if pcoal_heat is not None and pcoal_heat > 0:
return pcoal_heat
else:
try:
qs = SfLog.objects.get(work_date__year=year_s, work_date__month=month_s, work_date__day=day_s,
mgroup__name='回转窑', shift__name__in=['白班', '早班']) # hardcode
if qs.pcoal_heat is None or qs.pcoal_heat <=0:
s = SfLog.objects.filter(pcoal_heat__isnull=False).order_by('-start_time').first()
if s:
qs.pcoal_heat = s.pcoal_heat
else:
qs.pcoal_heat = 25000
qs.save(update_fields=['pcoal_heat'])
cache.set(key, qs.pcoal_heat, timeout=60 * 60 * 8)
return qs.pcoal_heat
except Exception as e:
myLogger.error(f'获取煤粉热值失败,{e}, {year_s}, {month_s}, {day_s}', exc_info=True)
return 25000
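# Illustrative usage sketch (comment only): the calorific value is cached per day for 8 hours,
# falls back to the most recent recorded value, and defaults to 25000 when nothing is available.
#
#   d = timezone.now().date()
#   heat = get_pcoal_heat(d.year, d.month, d.day)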
def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
"""
Operations that must run after a production log (Mlog) is submitted.
"""
if mlog.submit_time is not None:
return
if now is None:
now = timezone.now()
if mlog.handle_date is None:
raise ParseError('请选择结束/操作时间')
if now.date() < mlog.handle_date:
raise ParseError('不可提交未来的日志')
mgroup = mlog.mgroup
process = mgroup.process
into_wm_mgroup = process.into_wm_mgroup
need_store_notok = process.store_notok
belong_dept = mgroup.belong_dept
material_out: Material = mlog.material_out
material_in: Material = mlog.material_in
supplier = mlog.supplier # outsourcing supplier
is_fix = mlog.is_fix
m_ins_list = []
m_ins_bl_list = []
if is_fix:
can_matoutIds = process.get_canout_mat_ids()
# Build the batch relation chain
m_outs = Mlogb.objects.filter(mlog=mlog, material_out__isnull=False)
for item in m_outs:
if item.mlogb_from and item.batch != item.mlogb_from.batch:
target, _ = BatchSt.g_create(batch=item.batch, mlog=mlog, material_start=item.material_out)
source, _ = BatchSt.g_create(batch=item.mlogb_from.batch)
BatchLog.g_create(source= source, target=target, mlog=mlog)
if item.mlogbw_from and item.batch != item.mlogbw_from.mlogb.batch:
target, _ = BatchSt.g_create(batch=item.batch, mlog=mlog, material_start=item.material_out)
source, _ = BatchSt.g_create(batch=item.mlogbw_from.mlogb.batch)
BatchLog.g_create(source=source, target=target, mlog=mlog)
if material_in or is_fix: # workshop inventory needs to be managed
m_ins_list = []
m_ins = Mlogb.objects.filter(mlog=mlog, material_in__isnull=False)
if m_ins.exists():
m_ins = m_ins.filter(need_inout=False)
m_ins_list = [(mi.material_in, mi.batch, mi.count_use, None, mi) for mi in m_ins.all()]
for item in m_ins:
mbd_qs = MlogbDefect.get_defect_qs_from_mlogb(item)
for itemx in mbd_qs:
if itemx.defect:
m_ins_bl_list.append((item.material_in, item.batch, itemx.count, itemx.defect, item))
else:
m_ins_list = [(material_in, mlog.batch, mlog.count_use, None, mlog)]
for mi in m_ins_list:
mi_ma, mi_batch, mi_count, defect, mlog_or_b = mi
if mi_count <= 0:
raise ParseError('存在非正数!')
# Check that the consumed quantity is reasonable
# Prefer the work-section (mgroup) inventory first
if isinstance(mlog_or_b, Mlogb) and mlog_or_b.wm_in:
wm_qs = WMaterial.objects.filter(id=mlog_or_b.wm_in.id)
else:
wm_qs = WMaterial.objects.filter(batch=mi_batch, material=mi_ma, mgroup=mgroup, state=WMaterial.WM_OK)
if not wm_qs.exists():
wm_qs = WMaterial.objects.filter(batch=mi_batch, material=mi_ma,
belong_dept=belong_dept, mgroup=None, state=WMaterial.WM_OK)
count_x = wm_qs.count()
if count_x == 1:
wm = wm_qs.first()
elif count_x == 0:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-批次库存不存在!')
else:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-存在多个相同批次!')
if mi_count > wm.count:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-该批次车间库存不足!')
else:
wm.count = wm.count - mi_count
wm.update_by = user
wm.save()
if mi_ma.tracking == Material.MA_TRACKING_SINGLE:
mlogbws = Mlogbw.objects.filter(mlogb=mlog_or_b)
if mlogbws.count() != mi_count:
raise ParseError("日志与明细数量不一致,操作失败")
for item in mlogbws:
if item.ftest:
raise ParseError("不支持消耗物料的检验")
Wpr.change_or_new(wpr=item.wpr, old_wm=wm)
# Temporary extra handling for defects found before processing
if need_store_notok:
for item in m_ins_bl_list:
material, batch, count, defect, mi_ = item
if count <= 0:
raise ParseError('存在非正数!')
lookup = {'batch': batch, 'material': material, 'mgroup': mgroup, 'defect': defect, 'state': WMaterial.WM_NOTOK}
wm, is_create = WMaterial.objects.get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm.count = wm.count + count
if is_create:
wm.create_by = user
wm.batch_ofrom = mi_.batch_ofrom
wm.material_ofrom = mi_.material_ofrom
wm.update_by = user
wm.save()
if material.tracking == Material.MA_TRACKING_SINGLE:
raise ParseError("加工前不良的物料暂不支持单件追踪")
if material_out or is_fix: # outputs need to go into workshop inventory
mlogb_out_qs = Mlogb.objects.filter(mlog=mlog, material_out__isnull=False)
stored_notok = need_store_notok
stored_mgroup = need_store_notok
if mlogb_out_qs.exists():
mlogb_out_qs = mlogb_out_qs.filter(need_inout=True)
m_outs_list = [(mo.material_out, mo.batch if mo.batch else mlog.batch, mo.count_ok_full if mo.count_ok_full is not None else mo.count_ok, mlog.count_real_eweight, None, mo) for mo in mlogb_out_qs.all()]
if need_store_notok:
for item in mlogb_out_qs:
mbd_qs = MlogbDefect.get_defect_qs_from_mlogb(item)
if item.qct is not None or mbd_qs.exists():
# if item.material_out.tracking == Material.MA_TRACKING_SINGLE:
# Mlogbw.cal_count_notok(item)
for itemx in mbd_qs:
m_outs_list.append((item.material_out, item.batch, itemx.count, 0, itemx.defect, item))
# # Collect all main defect items / kept here for now
# bw_qs = Mlogbw.objects.filter(mlogb=item)
# defectIds= Ftest.objects.filter(mlogbw_ftest__in=bw_qs).exclude(defect_main=None).values_list("defect_main__id", flat=True).distinct()
# defects_map = {d.id: d for d in Defect.objects.filter(id__in=defectIds)}
# # Filter and aggregate the related data
# filtered_bw_qs = bw_qs.filter(
# ftest__defect_main__id__in=defects_map.keys()
# ).values('ftest__defect_main__id').annotate(xcount=Count('id'))
# # Assemble the results
# for defect_data in filtered_bw_qs:
# defect_id = defect_data['ftest__defect_main__id']
# xcount = defect_data['xcount']
# if xcount > 0:
# defect = defects_map[defect_id]
# m_outs_list.append((item.material_out, item.batch, xcount, 0, defect, item))
else:
for f in Mlogb._meta.fields:
if 'count_n_' in f.name and getattr(item, f.name) > 0:
notok_sign = f.name.replace('count_n_', '')
m_outs_list.append( (item.material_out, item.batch if item.batch else mlog.batch, getattr(item, f.name), mlog.count_real_eweight, notok_sign, item))
stored_notok = True
# Known gap: when the outputs are sibling parts, the defect counts are recorded on the mlog
# rather than on the mlogb, so the extra handling above has no effect; 光子 does not record defects, though
else:
m_outs_list = [(material_out, mlog.batch, mlog.count_ok, mlog.count_real_eweight, None, mlog)]
# Single-entry logs do not handle defective output for now
for mo in m_outs_list:
mo_ma, mo_batch, mo_count, mo_count_eweight, notok_sign_or_defect, mlog_or_b = mo
if mo_count < 0:
raise ParseError('存在负数!')
elif mo_count == 0:
continue
if is_fix:
wm_state = WMaterial.WM_REPAIRED
if mo_ma.id in can_matoutIds:
wm_state = WMaterial.WM_OK if notok_sign_or_defect is None or (
isinstance(notok_sign_or_defect, Defect) and notok_sign_or_defect.okcate in [Defect.DEFECT_OK, Defect.DEFECT_OK_B]
) else WMaterial.WM_NOTOK
else:
wm_state = WMaterial.WM_OK if notok_sign_or_defect is None or (
isinstance(notok_sign_or_defect, Defect) and notok_sign_or_defect.okcate in [Defect.DEFECT_OK, Defect.DEFECT_OK_B]
) else WMaterial.WM_NOTOK
lookup = {'batch': mo_batch, 'material': mo_ma, 'mgroup': None,
'notok_sign': None, 'defect': None, 'state': wm_state}
if isinstance(notok_sign_or_defect, Defect):
lookup['defect'] = notok_sign_or_defect
elif notok_sign_or_defect is not None:
lookup['notok_sign'] = notok_sign_or_defect
if into_wm_mgroup:
lookup['mgroup'] = mgroup
stored_mgroup = True
else:
lookup['belong_dept'] = belong_dept
wm, is_create2 = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm.count = wm.count + mo_count
wm.count_eweight = mo_count_eweight
wm.update_by = user
if supplier is not None:
wm.supplier = supplier
if is_create2:
wm.create_by = user
if isinstance(mlog_or_b, Mlog) and mlog_or_b.wm_in:
wm.batch_ofrom = mlog_or_b.wm_in.batch_ofrom
wm.material_ofrom = mlog_or_b.wm_in.material_ofrom
elif isinstance(mlog_or_b, Mlogb):
wm.batch_ofrom = mlog_or_b.batch_ofrom
wm.material_ofrom = mlog_or_b.material_ofrom
wm.save()
if mo_ma.tracking == Material.MA_TRACKING_SINGLE:
if notok_sign_or_defect:
mlogbws = Mlogbw.objects.filter(mlogb=mlog_or_b, ftest__is_ok=False, ftest__defect_main=notok_sign_or_defect)
else:
mlogbws = Mlogbw.objects.filter(Q(ftest=None) | Q(ftest__is_ok=True), mlogb=mlog_or_b)
mlogbws_count = mlogbws.count()
if mlogbws_count != mo_count:
raise ParseError("日志与明细数量不一致,操作失败")
for item in mlogbws:
if item.wpr:
Wpr.change_or_new(wpr=item.wpr, wm=wm, ftest=item.ftest)
else:
wpr_from = None
if item.mlogbw_from:
wpr_from = item.mlogbw_from.wpr
wpr = Wpr.change_or_new(number=item.number,
wm=wm, ftest=item.ftest,
wpr_from=wpr_from, batch_from=item.mlogb.batch)
item.wpr = wpr
item.save()
mlog.submit_time = now
mlog.submit_user = user
mlog.stored_notok = stored_notok
mlog.stored_mgroup = stored_mgroup
mlog.save()
# Update task progress
cal_mtask_progress_from_mlog(mlog)
# Update material counts
cal_material_count_from_mlog(mlog)
# Trigger batch statistics analysis
if mlog.batch:
MyThread(target=get_alldata_with_batch_and_store, args=(mlog.batch,)).start()
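# Illustrative usage sketch (comment only): a typical submit path validates first, then posts the
# log, which moves quantities through WMaterial and updates task progress and material counts.
# `some_mlog` and `request` are placeholders, not objects defined in this module.
#
#   mlog_submit_validate(some_mlog)
#   mlog_submit(some_mlog, request.user, timezone.now())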
def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
"""Revert a submitted production log (Mlog).
"""
if mlog.submit_time is None:
return
if now is None:
now = timezone.now()
mgroup = mlog.mgroup
process = mgroup.process
belong_dept = mgroup.belong_dept
material_out:Material = mlog.material_out
material_in:Material = mlog.material_in
stored_notok = mlog.stored_notok
stored_mgroup = mlog.stored_mgroup
is_fix = mlog.is_fix
if is_fix:
can_matoutIds = process.get_canout_mat_ids()
# First roll back the outputs
if material_out or is_fix: # outputs are returned
# There may be multiple outputs
# Returned defective outputs also need to be handled
mlogb_out_qs = Mlogb.objects.filter(mlog=mlog, material_out__isnull=False)
if mlogb_out_qs.exists():
mlogb_out_qs = mlogb_out_qs.filter(need_inout=True)
m_outs_list = [
(mo.material_out, mo.batch if mo.batch else mlog.batch, mo.count_ok_full if mo.count_ok_full is not None else mo.count_ok, mlog.count_real_eweight, None, mo)
for mo in mlogb_out_qs.all()]
if stored_notok:
for item in mlogb_out_qs:
mbd_qs = MlogbDefect.get_defect_qs_from_mlogb(item)
if item.qct is not None or mbd_qs.exists():
# if item.material_out.tracking == Material.MA_TRACKING_SINGLE:
# Mlogbw.cal_count_notok(item)
for itemx in mbd_qs:
m_outs_list.append((item.material_out, item.batch, itemx.count, 0, itemx.defect, item))
# if item.material_out.tracking == Material.MA_TRACKING_SINGLE:
# # Collect all main defect items
# bw_qs = Mlogbw.objects.filter(mlogb=item)
# defectIds= Ftest.objects.filter(mlogbw_ftest__in=bw_qs).exclude(defect_main=None).values_list("defect_main__id", flat=True).distinct()
# defects_map = {d.id: d for d in Defect.objects.filter(id__in=defectIds)}
# # Filter and aggregate the related data
# filtered_bw_qs = bw_qs.filter(
# ftest__defect_main__id__in=defects_map.keys()
# ).values('ftest__defect_main__id').annotate(xcount=Count('id'))
# # Assemble the results
# for defect_data in filtered_bw_qs:
# defect_id = defect_data['ftest__defect_main__id']
# xcount = defect_data['xcount']
# if xcount > 0:
# defect = defects_map[defect_id]
# m_outs_list.append((item.material_out, item.batch, xcount, 0, defect, item))
else:
for f in Mlogb._meta.fields:
if 'count_n_' in f.name and getattr(item, f.name) > 0:
notok_sign = f.name.replace('count_n_', '')
m_outs_list.append((item.material_out, item.batch if item.batch else mlog.batch,
getattr(item, f.name), mlog.count_real_eweight, notok_sign, item))
else:
m_outs_list = [(material_out, mlog.batch, mlog.count_ok, mlog.count_real_eweight, None, mlog)]
# Single-entry logs do not handle defective output for now
for mo in m_outs_list:
mo_ma, mo_batch, mo_count, _, notok_sign_or_defect, mlog_or_b = mo
if mo_count < 0:
raise ParseError('存在负数!')
elif mo_count == 0:
continue
if is_fix:
wm_state = WMaterial.WM_REPAIRED
if mo_ma.id in can_matoutIds:
wm_state = WMaterial.WM_OK if notok_sign_or_defect is None or (
isinstance(notok_sign_or_defect, Defect) and notok_sign_or_defect.okcate in [Defect.DEFECT_OK, Defect.DEFECT_OK_B]
) else WMaterial.WM_NOTOK
else:
wm_state = WMaterial.WM_OK if notok_sign_or_defect is None or (
isinstance(notok_sign_or_defect, Defect) and notok_sign_or_defect.okcate in [Defect.DEFECT_OK, Defect.DEFECT_OK_B]
) else WMaterial.WM_NOTOK
lookup = {'batch': mo_batch, 'material': mo_ma, 'mgroup': None, 'notok_sign': None, 'defect': None, 'state': wm_state}
if isinstance(notok_sign_or_defect, Defect):
lookup['defect'] = notok_sign_or_defect
else:
lookup['notok_sign'] = notok_sign_or_defect
if stored_mgroup:
lookup['mgroup'] = mgroup
else:
lookup['belong_dept'] = belong_dept
wm_qs = WMaterial.objects.filter(**lookup)
count_x = wm_qs.count()
if count_x == 1:
wm = wm_qs.first()
elif count_x == 0:
raise ParseError(
f'{str(mo_ma)}-{mo_batch}-批次库存不存在!')
else:
raise ParseError(
f'{str(mo_ma)}-{mo_batch}-存在多个相同批次!')
wm.count = wm.count - mo_count
if wm.count < 0:
raise ParseError('车间库存不足, 产物无法回退')
else:
wm.update_by = user
wm.save()
if mo_ma.tracking == Material.MA_TRACKING_SINGLE:
if notok_sign_or_defect:
mlogbws = Mlogbw.objects.filter(mlogb=mlog_or_b, ftest__is_ok=False, ftest__defect_main=notok_sign_or_defect)
else:
mlogbws = Mlogbw.objects.filter(Q(ftest=None) | Q(ftest__is_ok=True), mlogb=mlog_or_b)
if mlogbws.count() != mo_count:
raise ParseError("日志与明细数量不一致,操作失败")
for item in mlogbws:
Wpr.change_or_new(wpr=item.wpr, old_wm=wm)
# Then give back the consumed inputs
if material_in or is_fix: # consumed quantities are returned
m_ins_list = []
m_ins_bl_list = []
into_wm_mgroup = process.into_wm_mgroup
m_ins = Mlogb.objects.filter(mlog=mlog, material_in__isnull=False)
if m_ins.exists():
m_ins = m_ins.filter(need_inout=True)
for mi in m_ins.all():
m_ins_list.append((mi.material_in, mi.batch, mi.count_use, None, mi))
for item in m_ins:
mbd_qs = MlogbDefect.get_defect_qs_from_mlogb(item)
for itemx in mbd_qs:
if itemx.defect:
m_ins_bl_list.append((item.material_in, item.batch, itemx.count, itemx.defect, item))
else:
m_ins_list = [(material_in, mlog.batch, mlog.count_use, mlog.wm_in, mlog)]
for mi in m_ins_list:
mi_ma, mi_batch, mi_count, defect_or, mlog_or_b = mi
if mi_count <= 0:
raise ParseError('存在非正数!')
if isinstance(mlog_or_b, Mlogb) and mlog_or_b.wm_in:
wm = WMaterial.objects.get(id=mlog_or_b.wm_in.id)
else:
# For the 光子 case, wm_in is effectively required
lookup = {'batch': mi_batch, 'material': mi_ma, 'mgroup': None, 'state': WMaterial.WM_OK}
if into_wm_mgroup:
# Return to this work section
lookup['mgroup'] = mgroup
else:
lookup['belong_dept'] = belong_dept
wm, _ = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm.count = wm.count + mi_count
wm.update_by = user
wm.save()
if mi_ma.tracking == Material.MA_TRACKING_SINGLE:
mlogbws = Mlogbw.objects.filter(mlogb=mlog_or_b)
if mlogbws.count() != mi_count:
raise ParseError("日志与明细数量不一致,操作失败")
for item in mlogbws:
# if item.wpr:
Wpr.change_or_new(wpr=item.wpr, wm=wm)
# else:
# wpr = Wpr.change_or_new(number=item.number, wm=wm)
# item.wpr = wpr
# item.save()
# Temporary extra handling for defects found before processing
if stored_notok:
for item in m_ins_bl_list:
material, batch, count, defect, mi_ = item
if count <= 0:
raise ParseError('存在非正数!')
lookup = {'batch': batch, 'material': material, 'mgroup': mgroup, 'defect': defect, 'state': WMaterial.WM_NOTOK}
wm, is_create = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm.count = wm.count - count
if wm.count < 0:
raise ParseError('加工前不良数量大于库存量')
if is_create:
wm.create_by = user
else:
wm.update_by = user
wm.save()
if material.tracking == Material.MA_TRACKING_SINGLE:
raise ParseError("加工前不良的物料暂不支持单件回退")
mlog.submit_time = None
mlog.submit_user = None
mlog.save()
# Update the mtask state
update_mtaskIds = []
if mlog.mtask:
update_mtaskIds.append(mlog.mtask.id)
list_m = Mlogb.objects.filter(mlog=mlog).values_list('mtask__id', flat=True).distinct()
update_mtaskIds += list(list_m)
if update_mtaskIds:
Mtask.objects.filter(id__in=update_mtaskIds, state=Mtask.MTASK_SUBMIT).update(state=Mtask.MTASK_ASSGINED)
# Update task progress
cal_mtask_progress_from_mlog(mlog)
# Update material counts
cal_material_count_from_mlog(mlog)
# Clear the batch relation chain
BatchLog.clear(mlog=mlog)
# Trigger batch statistics analysis
if mlog.batch:
MyThread(target=get_alldata_with_batch_and_store, args=(mlog.batch,)).start()
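# Illustrative usage sketch (comment only): reverting restores the consumed inputs, removes the
# produced outputs from WMaterial, and moves submitted Mtask records back to the assigned state.
# `some_mlog` and `request` are placeholders, not objects defined in this module.
#
#   mlog_revert(some_mlog, request.user, timezone.now())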
def cal_mtask_progress_from_mlog(mlog):
"""
Update the progress of the tasks linked to the mlog (safe to run in a thread).
"""
if mlog.fill_way in [Mlog.MLOG_2, Mlog.MLOG_12] and mlog.mtask:
update_mtask(mlog.mtask, fill_way=mlog.fill_way)
elif mlog.fill_way == Mlog.MLOG_23:
cal_mlog_count_from_mlogb(mlog)
m_outs_qs = Mlogb.objects.filter(mlog=mlog, material_out__isnull=False)
caled_mtask = []
for item in m_outs_qs.all():
mtask = item.mtask
if mtask:
if mtask in caled_mtask:
continue
update_mtask(mtask, fill_way=mlog.fill_way)
caled_mtask.append(mtask)
def cal_mlog_count_from_mlogb(mlog: Mlog):
"""
Compute the Mlog count totals by aggregating its Mlogb rows.
"""
if mlog.fill_way == Mlog.MLOG_23:
a_dict = {
"total_count_use": Sum('count_use'),
"total_count_break": Sum('count_break'),
"total_count_break_t": Sum('count_break_t'),
"total_count_real": Sum('count_real'),
"total_count_ok": Sum('count_ok'),
"total_count_notok": Sum('count_notok'),
}
f_names = [f.name for f in Mlogb._meta.fields if 'count_n_' in f.name]
for f in f_names:
a_dict[f'total_{f}'] = Sum(f)
mlogb_summary = Mlogb.objects.filter(mlog=mlog).aggregate(
**a_dict
)
# Update the corresponding fields on the Mlog object
mlog.count_use = mlogb_summary['total_count_use'] or 0
mlog.count_break = mlogb_summary['total_count_break'] or 0
mlog.count_break_t = mlogb_summary['total_count_break_t'] or 0
mlog.count_real = mlogb_summary['total_count_real'] or 0
mlog.count_ok = mlogb_summary['total_count_ok'] or 0
mlog.count_notok = mlogb_summary['total_count_notok'] or 0
for f in f_names:
setattr(mlog, f, mlogb_summary[f'total_{f}'] or 0)
# Save the updated Mlog object
mlog.save()
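# Illustrative sketch (comment only) of the aggregation this helper performs: for a
# fill_way == MLOG_23 log it sums each count_* column of the child Mlogb rows, e.g.
#
#   Mlogb.objects.filter(mlog=some_mlog).aggregate(total_count_ok=Sum('count_ok'))
#
# and writes each total back onto the parent Mlog. `some_mlog` is a placeholder.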
def cal_material_count_from_mlog(mlog: Mlog):
"""
Recalculate the counts of the materials referenced by the mlog (can be run standalone).
"""
matid_list = []
if mlog.material_in:
matid_list.append(mlog.material_in.id)
if mlog.material_out:
matid_list.append(mlog.material_out.id)
matid_list2 = Mlogb.objects.filter(mlog=mlog).values_list('material_in__id', 'material_out__id').distinct()
for matid in matid_list2:
if matid[0]:
matid_list.append(matid[0])
if matid[1]:
matid_list.append(matid[1])
matid_list = list(set(matid_list))
cal_material_count(matid_list)
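# Illustrative usage sketch (comment only): collect the distinct material ids touched by a log
# and hand them to cal_material_count(); usually triggered right after submit or revert.
# `some_mlog` is a placeholder Mlog instance.
#
#   cal_material_count_from_mlog(some_mlog)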
def update_mtask(mtask: Mtask, fill_way: int = 10):
mtask = Mtask.objects.get(id=mtask.id) # re-fetch the latest mtask to guard against concurrent modification
from apps.pm.models import Utask
if fill_way == Mlog.MLOG_2:
res = Mlog.objects.filter(mtask=mtask).exclude(submit_time=None).aggregate(sum_count_real=Sum(
'count_real'), sum_count_ok=Sum('count_ok'), sum_count_notok=Sum('count_notok'))
mtask.count_real = res['sum_count_real'] if res['sum_count_real'] else 0
mtask.count_ok = res['sum_count_ok'] if res['sum_count_ok'] else 0
mtask.count_notok = res['sum_count_notok'] if res['sum_count_notok'] else 0
mtask.save()
utask = mtask.utask
if utask and mtask.is_count_utask:
res2 = Mtask.objects.filter(utask=utask, mgroup=mtask.mgroup).aggregate(sum_count_real=Sum(
'count_real'), sum_count_ok=Sum('count_ok'), sum_count_notok=Sum('count_notok'))
utask.count_real = res2['sum_count_real'] if res2['sum_count_real'] else 0
utask.count_ok = res2['sum_count_ok'] if res2['sum_count_ok'] else 0
utask.count_notok = res2['sum_count_notok'] if res2['sum_count_notok'] else 0
if utask.count_ok > 0 and utask.state == Utask.UTASK_ASSGINED:
utask.state = Utask.UTASK_WORKING
if Mtask.objects.filter(utask=utask).exclude(state=Mtask.MTASK_SUBMIT).count() == 0:
utask.state = Utask.UTASK_SUBMIT
utask.save()
elif fill_way in [Mlog.MLOG_23, Mlog.MLOG_12]:
# Only logs that have already been submitted
m_outs_qs_mtask = Mlogb.objects.filter(mtask=mtask, material_out__isnull=False, mlog__submit_time__isnull=False)
res = m_outs_qs_mtask.aggregate(
sum_count_real=Sum('count_real', default=0),
sum_count_ok=Sum('count_ok', default=0),
sum_count_notok=Sum('count_notok', default=0)
)
mtask.count_real = res['sum_count_real'] or 0
mtask.count_ok = res['sum_count_ok'] or 0
mtask.count_notok = res['sum_count_notok'] or 0
mtask.save()
utask = mtask.utask
is_main_mgroup = False
if utask:
if utask.state == Utask.UTASK_ASSGINED:
utask.state = Utask.UTASK_WORKING
utask.save()
if mtask.is_count_utask:
is_main_mgroup = True
elif mtask.material_out == utask.material:
is_main_mgroup = True
if is_main_mgroup:
res2 = Mtask.objects.filter(utask=utask, mgroup=mtask.mgroup).aggregate(sum_count_real=Sum(
'count_real'), sum_count_ok=Sum('count_ok'), sum_count_notok=Sum('count_notok'))
utask.count_real = res2['sum_count_real'] if res2['sum_count_real'] else 0
utask.count_ok = res2['sum_count_ok'] if res2['sum_count_ok'] else 0
utask.count_notok = res2['sum_count_notok'] if res2['sum_count_notok'] else 0
if Mtask.objects.filter(utask=utask).exclude(state=Mtask.MTASK_SUBMIT).count() == 0:
utask.state = Utask.UTASK_SUBMIT
utask.save()
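# Illustrative usage sketch (comment only): recompute a task's totals from its submitted logs;
# the fill_way decides whether Mlog rows (MLOG_2) or Mlogb rows (MLOG_23 / MLOG_12) are aggregated,
# and the related Utask is rolled up when appropriate. `some_mtask` is a placeholder Mtask.
#
#   update_mtask(some_mtask, fill_way=Mlog.MLOG_23)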
def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime, None]):
"""
Operations that must run after a handover is submitted.
"""
if handover.submit_time is not None:
return
if now is None:
now = timezone.now()
handoverb_qs = Handoverb.objects.filter(handover=handover)
need_add = True
material:Material = handover.material
mtype = handover.mtype
if '混料' in material.name: # hard code
need_add = False
if handoverb_qs.exists():
handoverb_list = [(item.wm.id, item.count, item) for item in handoverb_qs]
else:
handoverb_list = [(handover.wm.id, handover.count, handover)]
recive_mgroup = handover.recive_mgroup
recive_dept = handover.recive_dept
batches = []
new_batch = handover.new_batch
if new_batch and mtype != Handover.H_MERGE:
raise ParseError("只有合并时才能提供新批次号")
if mtype == Handover.H_MERGE:
if new_batch:
batches = [new_batch]
else:
raise ParseError("合并批次时请提供新批次号")
new_target = None
mids = []
for item in handoverb_list:
wmId, xcount, handover_or_b = item
if xcount <= 0:
raise ParseError("存在非正数!")
wm_from = WMaterial.objects.get(id=wmId)
mids.append(wm_from.material.id)
# Merge into a new batch
if mtype == Handover.H_MERGE:
batch = new_batch
if new_target is None:
new_target, _ = BatchSt.g_create(batch=batch, handover=handover, material_start=material)
source, _ = BatchSt.g_create(batch=wm_from.batch)
BatchLog.g_create(source=source, target=new_target, handover=handover, relation_type="merge")
elif mtype == Handover.H_DIV:
batch = handover_or_b.batch
target, _ = BatchSt.g_create(batch=batch, handover=handover, material_start=material)
if handover.wm is None:
raise ParseError('拆批请选择车间库存')
source, _ = BatchSt.g_create(batch=handover.wm.batch)
BatchLog.g_create(source=source, target=target, handover=handover, relation_type="split")
else:
batch = wm_from.batch
batches.append(batch)
if wm_from is None:
raise ParseError('找不到车间库存')
count_x = wm_from.count - xcount
if count_x < 0:
raise ParseError('车间库存不足!')
else:
wm_from.count = count_x
wm_from.save()
if need_add:
# Start the inventory change
if handover.type == Handover.H_NORMAL:
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
else:
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
belong_dept=recive_dept,
state=wm_from.state,
notok_sign=wm_from.notok_sign,
defect=wm_from.defect,
defaults={
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
}
)
elif handover.type == Handover.H_REPAIR:
# Repair handover
recive_mgroup = handover.recive_mgroup
if recive_mgroup:
# if recive_mgroup.process.type == Process.PRO_TEST:
# wm_state = WMaterial.WM_REPAIRED
# else:
wm_state = WMaterial.WM_REPAIR
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
belong_dept=recive_dept,
notok_sign=wm_from.notok_sign,
defect=wm_from.defect,
material_origin=material,
state=wm_state,
defaults={
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
}
)
else:
raise ParseError("返工交接必须指定接收工段")
elif handover.type == Handover.H_TEST:
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
state=WMaterial.WM_TEST,
belong_dept=recive_dept,
defaults={
"count_xtest": 0,
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
},
)
elif handover.type == Handover.H_SCRAP:
if recive_mgroup:
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
belong_dept=recive_dept,
notok_sign=wm_from.notok_sign,
defect=wm_from.defect,
state=WMaterial.WM_SCRAP,
defaults={
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
}
)
else:
raise ParseError("不支持非工段报废")
elif handover.type == Handover.H_CHANGE:
if handover.recive_mgroup:
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=handover.material_changed,
mgroup=recive_mgroup,
belong_dept=recive_dept,
notok_sign=None,
defect=None,
material_origin=material,
state=WMaterial.WM_OK,
defaults={
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
}
)
else:
raise ParseError("改版交接必须指定接收工段")
elif handover.type == Handover.H_BACK:
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
else:
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
belong_dept=recive_dept,
state=wm_from.state,
notok_sign=wm_from.notok_sign,
defect=wm_from.defect,
defaults={
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
}
)
else:
raise ParseError("不支持该交接类型")
wm_to.count = wm_to.count + xcount
wm_to.count_eweight = handover.count_eweight # NOTE: this assignment is a known hazard
wm_to.save()
handover_or_b.wm_to = wm_to
handover_or_b.save()
if material.tracking == Material.MA_TRACKING_SINGLE:
handoverbws = Handoverbw.objects.filter(handoverb=handover_or_b)
if handoverbws.count() != xcount:
raise ParseError("交接与明细数量不一致,操作失败")
for item in handoverbws:
wpr:Wpr = item.wpr
Wpr.change_or_new(wpr=wpr, wm=wm_to, old_wm=wpr.wm, old_mb=wpr.mb)
handover.submit_user = user
handover.submit_time = now
if handover.recive_user is None:
handover.recive_user = user
handover.save()
batches = list(set(batches))
if batches:
for batch in batches:
MyThread(target=get_alldata_with_batch_and_store, args=(batch,)).start()
# A version-change handover also needs the material counts recalculated
if handover.type == Handover.H_CHANGE:
mids.append(handover.material_changed.id)
cal_material_count(mids)
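# Illustrative usage sketch (comment only): submitting a handover moves stock from the sending
# WMaterial rows to the receiving work section or department, according to the handover type.
# `some_handover` and `request` are placeholders, not objects defined in this module.
#
#   handover_submit(some_handover, request.user, timezone.now())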
def handover_revert(handover: Handover):
BatchLog.clear(handover=handover)
def mlog_submit_validate(ins: Mlog):
if ins.submit_time:
raise ParseError('该日志已提交!')
if ins.mtask and ins.mtask.state == Mtask.MTASK_STOP:
raise ParseError('该任务已停止!')
if ins.fill_way == Mlog.MLOG_23:
if not Mlogb.objects.filter(material_out__isnull=False, mlog=ins).exists():
raise ParseError('该日志未指定产出!')
if not Mlogb.objects.filter(material_in__isnull=False, mlog=ins).exists():
raise ParseError('该日志未指定消耗!')
if Mlogb.objects.filter(material_out__isnull=False, count_real=0, mlog=ins).exists():
raise ParseError('产出数量不能为0!')
def bind_mlog(ticket: Ticket, transition, new_ticket_data: dict):
ins = Mlog.objects.get(id=new_ticket_data['t_id'])
mlog_submit_validate(ins) # check whether it can be submitted
ticket_data = ticket.ticket_data
ticket_data.update({
't_model': 'mlog',
't_id': ins.id,
})
ticket.ticket_data = ticket_data
ticket.create_by = ins.create_by
ticket.save()
if ins.ticket is None:
ins.ticket = ticket
ins.save()
def mlog_audit_end(ticket: Ticket):
now = timezone.now()
ins = Mlog.objects.get(id=ticket.ticket_data['t_id'])
mlog_submit(ins, ticket.create_by, now)
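# Illustrative sketch (comment only) of how these workflow hooks are meant to be wired:
# bind_mlog() attaches the Mlog to the ticket when the workflow starts, and mlog_audit_end()
# submits the log once the ticket is approved. `some_ticket`, `transition` and the t_id value
# are placeholders supplied by the workflow engine, not objects defined in this module.
#
#   bind_mlog(some_ticket, transition, new_ticket_data={'t_id': 123})
#   ...
#   mlog_audit_end(some_ticket)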
def get_batch_dag(batch_number: str):
try:
batch_ins = BatchSt.objects.get(batch=batch_number)
except Exception:
raise ParseError("该批次号未构建关系链")
# Collect all related batches (nodes) and edges
nodes_set = {batch_ins.id}
edges = []
processed_log_ids = set() # guard so the same BatchLog row is not appended again on later passes
prev_size = 0
r_dict = {
"split": "",
"merge": ""
}
while len(nodes_set) > prev_size:
prev_size = len(nodes_set)
# Query every record where the batches found so far appear as source or target
logs = BatchLog.objects.filter(Q(source__id__in=nodes_set) | Q(target__id__in=nodes_set)).select_related(
"source", "target"
).order_by("update_time")
# Process each record, expanding the node set and the edge list
for log in logs:
if log.id in processed_log_ids:
continue
processed_log_ids.add(log.id)
source = log.source.id
target = log.target.id
nodes_set.update([source, target])
edges.append({
'source': source,
'target': target,
'label': r_dict.get(log.relation_type, ""), # use relation_type to pick the edge label
})
# Deduplicate edges
# unique_edges = {}
# for edge in edges:
# key = (edge['source'], edge['target'])
# if key not in unique_edges:
# unique_edges[key] = edge
# Sort the batch nodes
nodes_qs = BatchSt.objects.filter(id__in=nodes_set).order_by('update_time')
# batch_to_id = {batch: idx for idx, batch in enumerate(nodes_list)}
# Build the node data, defaulting to the 'rect' shape
nodes = [{
'id': item.id,
'label': item.batch,
'shape': 'rect' # the shape can be adjusted to business needs
} for item in nodes_qs]
# Build the edge data
# edges_converted = [{
# 'source': edge['source'],
# 'target': edge['target'],
# 'label': edge['label']
# } for edge in unique_edges.values()]
return {'nodes': nodes, 'edges': edges}
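# Illustrative usage sketch (comment only): the returned dict is shaped for a front-end graph
# component. The batch string below is a placeholder, not a real batch number.
#
#   dag = get_batch_dag('B240101>CX>S01')
#   # dag == {'nodes': [{'id': ..., 'label': ..., 'shape': 'rect'}, ...],
#   #         'edges': [{'source': ..., 'target': ..., 'label': ...}, ...]}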