commit 1010a6b86f
Author: zty
Date: 2024-11-25 17:45:10 +08:00
19 changed files with 220 additions and 129 deletions

View File

@@ -469,6 +469,13 @@ class TestViewSet(CustomGenericViewSet):
                             v_num=2)
         return Response()
 
+    @action(methods=['post'], detail=False, serializer_class=Serializer)
+    def test_event_speak(self, request, pk=None):
+        from apps.ecm.models import Event
+        from apps.ecm.service import save_voice_and_speak
+        save_voice_and_speak(Event.objects.get(id=request.data['event']))
+        return Response()
+
     @action(methods=['post'], detail=False, serializer_class=Serializer)
     def test_not_in_place(self, request, pk=None):
         opl = request.data.get('opl')

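Note: the new test_event_speak action simply looks up an Event by the posted id and pushes its voice message to the speakers via save_voice_and_speak. A minimal way to exercise it from a test, sketched under the assumption that the viewset is routed under /api/test/ (the actual prefix depends on the project's router setup):

    # Illustrative only, not part of this commit.
    from rest_framework.test import APIClient

    client = APIClient()
    # client.force_authenticate(user=some_user)  # auth setup is project-specific
    resp = client.post('/api/test/test_event_speak/', {'event': 1}, format='json')
    assert resp.status_code == 200  # the action returns an empty Response on success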
View File

@@ -161,7 +161,7 @@ def save_voice_and_speak(event: Event):
         # else:
         #     _, event.voice, _ = generate_voice(event.voice_msg, v_p)
         # event.save()
-        if main_cate.speaker_on and event.voice:
+        if main_cate.speaker_on and event.voice_msg:
             sps = []
             if event.area: # 如果事件存在发生区域
                 sps = list(TDevice.objects.filter(area=event.area,
@@ -175,8 +175,8 @@ def save_voice_and_speak(event: Event):
                 for i in sps2:
                     if i not in sps:
                         sps.append(i)
-            # myLogger.info('获取到喇叭:' + str(sps))
             if sps:
+                myLogger.info(f'喇叭播放:{event.voice_msg}, {sps}, {v_num}')
                 spClient.speak(event.voice_msg, sps, v_num, v_p=v_p)
     except Exception:
         myLogger.error('喇叭播放失败', exc_info=True)

View File

@@ -23,7 +23,8 @@ class EquipFilterSet(filters.FilterSet):
             "cate__code": ['exact', 'in', 'contains'],
             "cate__is_for_safe": ['exact'],
             "cate__is_for_enp": ['exact'],
-            "cate__is_car": ['exact']
+            "cate__is_car": ['exact'],
+            "is_deleted": ['exact']
         }
 
     def filter_tag(self, queryset, name, value):

View File

@@ -1,7 +1,6 @@
 from django.db import models
 from apps.utils.models import CommonBModel, CommonADModel
-from apps.system.models import User, Dictionary
-from enum import Enum
+from apps.system.models import User
 
 # Create your models here.

View File

@@ -41,16 +41,16 @@ class EquipmentViewSet(CustomModelViewSet):
     设备列表
     """
-    queryset = Equipment.objects.all()
+    queryset = Equipment.objects.get_queryset(all=True)
     serializer_class = EquipmentSerializer
     select_related_fields = ["create_by", "belong_dept", "keeper", "mgroup"]
     search_fields = ["number", "name"]
     filterset_class = EquipFilterSet
 
-    # def filter_queryset(self, queryset):
-    #     if not self.detail and not self.request.query_params.get('type', None):
-    #         raise ParseError('请指定设备类型')
-    #     return super().filter_queryset(queryset)
+    def get_queryset(self):
+        if self.request.method == 'GET' and (not self.request.query_params.get('is_deleted', None)):
+            self.queryset = Equipment.objects.all()
+        return super().get_queryset()
 
     @action(methods=["post"], detail=False, perms_map={"post": "equipment.create"}, serializer_class=Serializer)
     @transaction.atomic

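Note: the reworked EquipmentViewSet pairs a class-level queryset that includes soft-deleted rows (Equipment.objects.get_queryset(all=True)) with a get_queryset() override that drops back to the default manager for GET requests that do not pass is_deleted, so ordinary listings keep hiding deleted equipment while ?is_deleted=true (together with the new filter field above) exposes it. The custom manager itself is defined elsewhere in this repo; a hypothetical sketch of the kind of manager the pattern relies on:

    # Hypothetical sketch; the real manager lives in apps.utils and may differ.
    from django.db import models

    class SoftDeleteManager(models.Manager):
        def get_queryset(self, all=False):
            qs = super().get_queryset()
            if all:
                return qs                          # include soft-deleted rows
            return qs.filter(is_deleted=False)     # hide soft-deleted rows by default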
View File

@@ -185,7 +185,7 @@ class MIODoSerializer(CustomModelSerializer):
         return super().create(validated_data)
 
     def update(self, instance, validated_data):
-        validated_data.pop('type')
+        validated_data.pop('type', None)
         return super().update(instance, validated_data)
@@ -212,7 +212,7 @@ class MIOSaleSerializer(CustomModelSerializer):
         return super().create(validated_data)
 
     def update(self, instance, validated_data):
-        validated_data.pop('type')
+        validated_data.pop('type', None)
         return super().update(instance, validated_data)
@@ -239,7 +239,7 @@ class MIOPurSerializer(CustomModelSerializer):
         return super().create(validated_data)
 
     def update(self, instance, validated_data):
-        validated_data.pop('type')
+        validated_data.pop('type', None)
         return super().update(instance, validated_data)
@@ -259,7 +259,7 @@ class MIOOtherSerializer(CustomModelSerializer):
         return super().create(validated_data)
 
     def update(self, instance, validated_data):
-        validated_data.pop('type')
+        validated_data.pop('type', None)
         return super().update(instance, validated_data)

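Note: all four MIO serializers make the same small fix. dict.pop('type') raises KeyError when an update payload omits the type field, while pop('type', None) simply returns None. A two-line illustration:

    validated_data = {'count': 5}        # an update payload without "type"
    validated_data.pop('type', None)     # returns None, no exception
    # validated_data.pop('type')         # would raise KeyError here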
View File

@@ -132,9 +132,10 @@ def do_in(item: MIOItem):
         else:
             raise ParseError(f'{str(xmaterial)}-{xbatch}车间物料不足')
+        wm_production_dept = wm.mgroup.belong_dept if wm.mgroup else None
         if production_dept is None:
-            production_dept = wm.mgroup.belong_dept
-        elif production_dept != wm.mgroup.belong_dept:
+            production_dept = wm_production_dept
+        elif wm_production_dept and production_dept != wm_production_dept:
             raise ParseError(f'{str(xmaterial)}-{xbatch}车间物料不属于同一车间')
     # 增加mb
     if not is_zhj:

View File

@@ -147,12 +147,15 @@ class PmService:
             mgroups = Mgroup.objects.filter(process=val.process)
             mgroups_count = mgroups.count()
             if mgroups_count == 1:
-                mgroup = mgroups.first()
+                pass
             elif mgroups_count == 0:
                 raise ParseError(f'{ind+1}步-工段不存在!')
-            else: # 后面可能会指定车间
-                raise ParseError(f'{ind+1}步-工段存在多个!')
+            else: # 存在同一工序的多个工段,先平均分配
+                pass
             if schedule_type == 'to_day':
+                if mgroups_count > 1:
+                    raise ParseError(f'{ind+1}步-工段存在多个!')
+                mgroup = mgroups.first()
                 task_count_day = math.ceil(count_task_list[ind]/rela_days)
                 if rela_days >= 1:
                     for i in range(rela_days):
@@ -173,22 +176,23 @@ class PmService:
                             'is_count_utask': val.is_count_utask
                         })
             elif schedule_type == 'to_mgroup':
-                Mtask.objects.create(**{
-                    'route': val,
-                    'number': f'{number}_r{ind+1}',
-                    'type': utask.type,
-                    'material_out': halfgood,
-                    'material_in': material_in,
-                    'mgroup': mgroup,
-                    'count': count_task_list[ind],
-                    'start_date': start_date,
-                    'end_date': end_date,
-                    'utask': utask,
-                    'create_by': user,
-                    'update_by': user,
-                    'hour_work': val.hour_work,
-                    'is_count_utask': val.is_count_utask
-                })
+                for indx, mgroup in enumerate(mgroups):
+                    Mtask.objects.create(**{
+                        'route': val,
+                        'number': f'{number}_r{ind+1}_m{indx+1}',
+                        'type': utask.type,
+                        'material_out': halfgood,
+                        'material_in': material_in,
+                        'mgroup': mgroup,
+                        'count': math.ceil(count_task_list[ind]/mgroups_count),
+                        'start_date': start_date,
+                        'end_date': end_date,
+                        'utask': utask,
+                        'create_by': user,
+                        'update_by': user,
+                        'hour_work': val.hour_work,
+                        'is_count_utask': val.is_count_utask
+                    })
             else:
                 raise ParseError('不支持的排产类型')
 
     @classmethod
@@ -319,8 +323,6 @@ class PmService:
             mtask.submit_time = now
             mtask.submit_user = user
             mtask.save()
-            utask = mtask.utask
-            PmService.utask_submit(utask)
         else:
             raise ParseError('该任务状态不可提交')
@@ -333,4 +335,4 @@ class PmService:
             utask.state = Utask.UTASK_SUBMIT
             utask.save()
         else:
-            raise ParseError('该任务状态不可提交')
+            raise ParseError('存在子任务未提交')

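Note: for the to_mgroup schedule type, the change creates one Mtask per work section that shares the process and splits the planned quantity with ceiling division, so the per-section counts can add up to slightly more than the ordered quantity (up to mgroups_count - 1 extra units in total). A quick check of the arithmetic:

    import math

    count_task = 100
    mgroups_count = 3
    per_mgroup = math.ceil(count_task / mgroups_count)   # 34
    print(per_mgroup * mgroups_count)                     # 102, a small over-allocation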
View File

@@ -0,0 +1,25 @@
+# Generated by Django 3.2.12 on 2024-11-21 02:44
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('em', '0018_alter_equipment_mgroup'),
+        ('qm', '0025_auto_20240920_0914'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='ftestwork',
+            name='equipment',
+            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='em.equipment', verbose_name='所属检验设备'),
+        ),
+        migrations.AddField(
+            model_name='ftestwork',
+            name='note',
+            field=models.TextField(blank=True, null=True, verbose_name='备注'),
+        ),
+    ]

View File

@@ -2,6 +2,7 @@ from django.db import models
 from apps.system.models import CommonAModel, CommonADModel, User
 from apps.utils.models import CommonBDModel, BaseModel
 from apps.mtm.models import Material, Mgroup, Team, Shift
+from apps.em.models import Equipment
 from apps.wpm.models import SfLog, WMaterial
 from django.utils.translation import gettext_lazy as _
@@ -109,8 +110,10 @@ class FtestWork(CommonBDModel):
     """
     检验工作
     """
+    TYPE2_SOME = 10
+    TYPE2_ALL = 20
     type = models.CharField('检验类型', max_length=20, choices=FTEST_TYPE_CHOICES, default='prod')
-    type2 = models.PositiveSmallIntegerField('检验类型2', choices=((10, '抽检'), (20, '全检')), default=10)
+    type2 = models.PositiveSmallIntegerField('检验类型2', choices=((TYPE2_SOME, '抽检'), (TYPE2_ALL, '全检')), default=10)
     shift = models.ForeignKey(Shift, verbose_name='班次', on_delete=models.SET_NULL, null=True, blank=True)
     wm = models.ForeignKey(WMaterial, verbose_name='关联车间库存', on_delete=models.SET_NULL, null=True, blank=True)
     mb = models.ForeignKey('inm.materialbatch', verbose_name='关联仓库', on_delete=models.SET_NULL, null=True, blank=True)
@@ -129,6 +132,8 @@ class FtestWork(CommonBDModel):
         User, verbose_name='操作人', on_delete=models.CASCADE, related_name='ftestwork_test_user', null=True, blank=True)
     submit_time = models.DateTimeField('提交时间', null=True, blank=True)
     submit_user = models.ForeignKey(User, on_delete=models.CASCADE, verbose_name='提交人', null=True, blank=True)
+    note = models.TextField('备注', null=True, blank=True)
+    equipment = models.ForeignKey(Equipment, verbose_name='所属检验设备', on_delete=models.SET_NULL, null=True, blank=True)
 
 
 class Ftest(CommonBDModel):

View File

@@ -66,7 +66,11 @@ class QuaStatUpdateSerializer(CustomModelSerializer):
 class FtestWorkCreateUpdateSerializer(CustomModelSerializer):
     class Meta:
         model = FtestWork
-        fields = ['id', 'shift', 'wm', 'mb', 'type', 'type2', 'test_date', 'count', 'count_sampling', 'count_sampling_ok', 'count_ok', 'count_notok', 'count_notok_json', 'test_user', 'need_update_wm']
+        fields = ['id', 'shift', 'wm', 'mb',
+                  'type', 'type2', 'test_date', 'count', 'count_sampling',
+                  'count_sampling_ok', 'count_ok', 'count_notok',
+                  'count_notok_json', 'test_user', 'need_update_wm',
+                  'equipment', 'note']
         extra_kwargs = {'test_user': {'required': True}, 'type': {'required': True}}
 
     def validate(self, attrs):
@@ -182,3 +186,8 @@ class PtestSerializer(CustomModelSerializer):
     class Meta:
         model = Ptest
         fields = '__all__'
+
+    def create(self, validated_data):
+        if Ptest.objects.filter(sample_number=validated_data['sample_number']).exists():
+            raise serializers.ValidationError('该样品编号已存在')
+        return super().create(validated_data)

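Note: the new PtestSerializer.create rejects duplicate sample_number values with an explicit exists() lookup. The same constraint could also be declared on the field with DRF's built-in UniqueValidator; a sketch for comparison, assuming sample_number is a character field and Ptest is imported from this app's models:

    from rest_framework import serializers
    from rest_framework.validators import UniqueValidator

    class PtestSerializerAlt(CustomModelSerializer):
        sample_number = serializers.CharField(
            validators=[UniqueValidator(queryset=Ptest.objects.all(), message='该样品编号已存在')])

        class Meta:
            model = Ptest
            fields = '__all__'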
View File

@@ -83,23 +83,35 @@ class FtestViewSet(CustomModelViewSet):
     select_related_fields = ['test_user', 'check_user', 'ftest_work']
     filterset_fields = ['type', 'ftest_work']
 
+    def count_sampling(self, ftest_work:FtestWork):
+        qs = Ftest.objects.filter(ftest_work=ftest_work)
+        all_count = qs.count()
+        ok_count = qs.filter(is_ok=True).count()
+        ftest_work.count_sampling = all_count
+        ftest_work.count_sampling_ok = ok_count
+        if ftest_work.type2 == FtestWork.TYPE2_ALL: # 如果是全检
+            ftest_work.count_ok = ok_count
+            ftest_work.count_notok = all_count - ok_count
+        ftest_work.save()
+
     @transaction.atomic
     def perform_create(self, serializer):
-        ins = serializer.save()
+        ins: Ftest = serializer.save()
         if ins.ftest_work:
-            ins.ftest_work.count_sampling = Ftest.objects.filter(
-                ftest_work=ins.ftest_work).count()
-            ins.ftest_work.save()
-        return ins
+            self.count_sampling(ins.ftest_work)
+
+    @transaction.atomic
+    def perform_update(self, serializer):
+        ins: Ftest = serializer.save()
+        if ins.ftest_work:
+            self.count_sampling(ins.ftest_work)
 
     @transaction.atomic
     def perform_destroy(self, instance):
         ftest_work = instance.ftest_work
         instance.delete()
         if ftest_work:
-            instance.ftest_work.count_sampling = Ftest.objects.filter(
-                ftest_work=instance.ftest_work).count()
-            instance.ftest_work.save()
+            self.count_sampling(ftest_work)
 
 
 class PtestViewSet(CustomModelViewSet):

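Note: count_sampling centralizes the recount that used to be duplicated in perform_create and perform_destroy (and was missing from updates): it tallies all Ftest rows for the work and, for full-inspection works (TYPE2_ALL), mirrors the tallies into count_ok/count_notok. The two count() queries could be collapsed into one aggregate; a sketch of that variant using the same models:

    from django.db.models import Count, Q

    def count_sampling(self, ftest_work: FtestWork):
        stats = Ftest.objects.filter(ftest_work=ftest_work).aggregate(
            all_count=Count('id'),
            ok_count=Count('id', filter=Q(is_ok=True)))
        ftest_work.count_sampling = stats['all_count']
        ftest_work.count_sampling_ok = stats['ok_count']
        if ftest_work.type2 == FtestWork.TYPE2_ALL:  # full inspection
            ftest_work.count_ok = stats['ok_count']
            ftest_work.count_notok = stats['all_count'] - stats['ok_count']
        ftest_work.save()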
View File

@@ -249,9 +249,10 @@ class TDeviceViewSet(ListModelMixin, UpdateModelMixin, DestroyModelMixin, Custom
             "pageSize": 1000
         }
         _, res = spClient.request(**spapis['device_list'], params=params)
-        rows = res['rows']
+        rows = res.get('rows', [])
         t_l = []
-        for i in rows:
+        filtered_rows = [i for i in rows if i['name'] != '采集器']
+        for i in filtered_rows:
             t_l.append(i['sn'])
             td = TDevice.objects.filter(code=i['sn']).first()
             if td:
@@ -424,17 +425,19 @@ class TDeviceViewSet(ListModelMixin, UpdateModelMixin, DestroyModelMixin, Custom
         # print(request.data)
         _, res = spClient.request(**spapis['device_list'], params=request.data)
         codes = []
-        for i in res['rows']:
+        filtered_rows = [i for i in res.get('rows', []) if i['name'] != '采集器']
+        for i in filtered_rows:
             codes.append(i['sn'])
         tds_info = TDeviceSerializer(
             instance=TDevice.objects.filter(code__in=codes), many=True).data
         tds_dict = {}
         for i in tds_info:
             tds_dict[i['code']] = i
-        for i in res['rows']:
+        for i in filtered_rows:
             i['my_info'] = {}
             if i['sn'] in tds_dict:
                 i['my_info'] = tds_dict[i['sn']]
+        res['rows'] = filtered_rows
         return Response(res)
 
     @action(methods=['post'], detail=False, perms_map={'post': '*'},

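Note: both sync endpoints now drop rows whose name is '采集器' (collector devices) before building the device list, and the second endpoint returns the filtered rows instead of the raw payload. The comprehension assumes every row carries a 'name' key; a slightly more defensive variant, if the speaker API ever omits it:

    rows = res.get('rows', [])
    filtered_rows = [i for i in rows if i.get('name') != '采集器']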
View File

@@ -37,7 +37,11 @@ class WMaterialFilter(filters.FilterSet):
         return queryset.filter(mgroup__id=value)|queryset.filter(belong_dept=Mgroup.objects.get(id=value).belong_dept, mgroup=None)
 
     def filter_mtaskx(self, queryset, name, value):
-        return WMaterial.mat_in_qs(mtask=Mtask.objects.get(id=value), qs=queryset)
+        mtask = Mtask.objects.get(id=value)
+        if mtask.route and mtask.route.batch_bind:
+            return WMaterial.mat_in_qs(mtask=Mtask.objects.get(id=value), qs=queryset)
+        return queryset
 
     class Meta:
         model = WMaterial
         fields = {
@@ -117,6 +121,9 @@ class MlogbFilter(filters.FilterSet):
         fields = {
             "mlog": ["exact"],
             "mtask": ["exact"],
+            "wm_in": ["exact"],
+            "mlog__submit_time": ["isnull"],
+            "wm_in__state": ["exact"],
             "material_in": ["exact", "isnull"],
             "material_out": ["exact", "isnull"],
         }

View File

@@ -0,0 +1,18 @@
+# Generated by Django 3.2.12 on 2024-11-21 05:21
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('wpm', '0071_auto_20241113_1555'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='handover',
+            name='note',
+            field=models.TextField(blank=True, null=True, verbose_name='备注'),
+        ),
+    ]

View File

@@ -356,6 +356,7 @@ class Handover(CommonADModel):
     submit_time = models.DateTimeField('提交时间', null=True, blank=True)
     submit_user = models.ForeignKey(
         User, verbose_name='提交人', on_delete=models.CASCADE, null=True, blank=True, related_name='handover_submit_user')
+    note = models.TextField('备注', null=True, blank=True)
 
     @property
     def handoverb(self):

View File

@@ -236,8 +236,6 @@ class MlogSerializer(CustomModelSerializer):
         source='create_by.name', read_only=True)
     update_by_name = serializers.CharField(
         source='update_by.name', read_only=True)
-    handovers = serializers.PrimaryKeyRelatedField(
-        source='handover_mlog', read_only=True, many=True)
     material_out_ = MaterialSimpleSerializer(
         source='material_out', read_only=True)
     material_out_name = serializers.StringRelatedField(
@@ -254,24 +252,13 @@ class MlogSerializer(CustomModelSerializer):
         source='equipment', read_only=True)
     equipment_2_name = serializers.StringRelatedField(
         source='equipment_2', read_only=True)
-    shift = serializers.PrimaryKeyRelatedField(
-        label='班次ID', queryset=Shift.objects.all(), required=True)
-    mgroup = serializers.PrimaryKeyRelatedField(
-        label='工段ID', queryset=Mgroup.objects.all(), required=True
-    )
-    material_out = serializers.PrimaryKeyRelatedField(
-        label='产物ID', queryset=Material.objects.all(), required=True
-    )
     shift_name = serializers.CharField(source='shift.name', read_only=True)
     mlogb = MlogbSerializer(
         label='多产出件信息', many=True, required=False)
     mlogb_full = MlogbDetailSerializer(
-        label='物料信息', many=True, read_only=True
-    )
+        label='物料信息', many=True, read_only=True)
     handle_users_ = UserSimpleSerializer(
         source='handle_users', many=True, read_only=True)
-    equipments_name = serializers.StringRelatedField(
-        source='equipments', read_only=True, many=True)
     ticket_ = TicketSimpleSerializer(source='ticket', read_only=True)
     test_user_name = serializers.CharField(source='test_user.name', read_only=True)
@@ -281,7 +268,9 @@ class MlogSerializer(CustomModelSerializer):
         read_only_fields = EXCLUDE_FIELDS + \
             ['submit_time', 'submit_user', 'material_outs']
         extra_kwargs = {
-            "batch": {"required": True}
+            "batch": {"required": True},
+            "shift": {"required": True},
+            "material_out": {"required": True}
         }
 
     def create(self, validated_data):
@@ -297,16 +286,17 @@ class MlogSerializer(CustomModelSerializer):
         wm_in = instance.wm_in
         if wm_in:
             batch_in = wm_in.batch
-        add_dict = {
-            'mlog': instance, 'batch': batch_in, 'wm_in': wm_in,
-            'mtask': instance.mtask, 'material_in': instance.material_in,
-            'count_use': instance.count_use, 'count_break': instance.count_break,
-            'count_pn_jgqbl': instance.count_pn_jgqbl
-        }
-        if wm_in:
-            add_dict['batch_ofrom'] = wm_in.batch_ofrom
-            add_dict['material_ofrom'] = wm_in.material_ofrom
-        Mlogb.objects.create(**add_dict)
+        if instance.material_in: # 如果有消耗
+            add_dict = {
+                'mlog': instance, 'batch': batch_in, 'wm_in': wm_in,
+                'mtask': instance.mtask, 'material_in': instance.material_in,
+                'count_use': instance.count_use, 'count_break': instance.count_break,
+                'count_pn_jgqbl': instance.count_pn_jgqbl
+            }
+            if wm_in:
+                add_dict['batch_ofrom'] = wm_in.batch_ofrom
+                add_dict['material_ofrom'] = wm_in.material_ofrom
+            Mlogb.objects.create(**add_dict)
 
         # mlogb只用于组合件输出物填写
         brotherId_should_list = material_out.brothers
@@ -337,6 +327,7 @@ class MlogSerializer(CustomModelSerializer):
             add_dict_2 = {
                 'mlog': instance, 'batch': batch_out,
                 'mtask': instance.mtask, 'material_out': instance.material_out,
+                'count_real': instance.count_real,
                 'count_ok': instance.count_ok, 'count_notok': instance.count_notok,
                 'count_break_t': instance.count_break_t
             }
@@ -345,9 +336,8 @@ class MlogSerializer(CustomModelSerializer):
                     add_dict_2[f.name] = getattr(instance, f.name)
             ddict = {}
             if wm_in:
-                wm_in = instance.wm_in
                 ddict = {"batch_ofrom": wm_in.batch_ofrom, "material_ofrom": wm_in.material_ofrom}
-            Mlogb.objects.create(**add_dict_2, defaults=ddict)
+            Mlogb.objects.get_or_create(**add_dict_2, defaults=ddict)
         return instance
 
     def update(self, instance, validated_data):
@@ -362,39 +352,47 @@ class MlogSerializer(CustomModelSerializer):
         mlogb = validated_data.pop('mlogb', [])
         instance: Mlog = super().update(instance, validated_data)
         wm_in = instance.wm_in
-        if instance.fill_way == Mlog.MLOG_12:
+        batch_in = instance.batch
+        if wm_in:
+            batch_in = wm_in.batch
+        # 修改消耗
+        if instance.fill_way in [Mlog.MLOG_12, Mlog.MLOG_2]:
             # 自动生成mlogb
-            batch_in = instance.batch
-            if wm_in:
-                batch_in = wm_in.batch
-            minx, _ = Mlogb.objects.get_or_create(
-                mlog=instance,
-                batch=batch_in,
-                wm_in=instance.wm_in,
-                mtask=instance.mtask,
-                material_in=instance.material_in
-            )
-            if wm_in:
-                minx.batch_ofrom = wm_in.batch_ofrom
-                minx.material_ofrom = wm_in.material_ofrom
-            minx.count_use = instance.count_use
-            minx.count_break = instance.count_break
-            minx.count_pn_jgqbl = instance.count_pn_jgqbl
-            minx.save()
+            if instance.material_in: # 有消耗的情况
+                minx, _ = Mlogb.objects.get_or_create(
+                    mlog=instance,
+                    batch=batch_in,
+                    wm_in=instance.wm_in,
+                    mtask=instance.mtask,
+                    material_in=instance.material_in
+                )
+                if wm_in:
+                    minx.batch_ofrom = wm_in.batch_ofrom
+                    minx.material_ofrom = wm_in.material_ofrom
+                minx.count_use = instance.count_use
+                minx.count_break = instance.count_break
+                minx.count_pn_jgqbl = instance.count_pn_jgqbl
+                minx.save()
+                Mlogb.objects.filter(mlog=instance, material_in__isnull=False).exclude(id=minx.id).delete()
 
-        if mlogb and instance.fill_way == Mlog.MLOG_2:
-            Mlogb.objects.filter(mlog=instance, material_out__isnull=False).update(count_ok=0)
+        # 修改产出
+        if instance.fill_way == Mlog.MLOG_2 and instance.material_out.brothers:
+            # 针对兄弟件的情况
+            Mlogb.objects.filter(mlog=instance, material_out__isnull=False).update(
+                batch=instance.batch, # 注意mlog的batch有可能会进行修改
+                count_ok=0)
             for item in mlogb:
                 Mlogb.objects.filter(mlog=instance, material_out=item['material_out']).update(
+                    batch=instance.batch,
                     count_ok=item['count_ok'])
-        elif instance.fill_way == Mlog.MLOG_12:
+        elif instance.fill_way in [Mlog.MLOG_12, Mlog.MLOG_2]:
             # 生成产出物
             batch_out = instance.batch
             if batch_out:
                 pass
             else:
                 batch_out = generate_new_batch(batch_in, instance)
             mox, _ = Mlogb.objects.get_or_create(mlog=instance, batch=batch_out,
                                                  mtask=instance.mtask, material_out=instance.material_out)
             mox.count_ok = instance.count_ok
@@ -411,7 +409,7 @@ class MlogSerializer(CustomModelSerializer):
         return instance
 
     def validate(self, attrs):
-        attrs['fill_way'] = Mlog.MLOG_2
+        attrs['fill_way'] = Mlog.MLOG_2 # 只填第二级
         attrs['mtype'] = Mlog.MTYPE_SELF # 默认为自生产
         fmlog = attrs.get('fmlog', None)
         mtaskb = attrs.get('mtaskb', None)
@@ -645,7 +643,7 @@ class HandoverSerializer(CustomModelSerializer):
     material_name = serializers.StringRelatedField(
         source='material', read_only=True)
     wm_notok_sign = serializers.CharField(source='wm.notok_sign', read_only=True)
-    handoverb = HandoverbSerializer(many=True)
+    handoverb = HandoverbSerializer(many=True, required=False)
 
     def validate(self, attrs):
         if 'type' not in attrs:
@@ -678,8 +676,10 @@ class HandoverSerializer(CustomModelSerializer):
         for ind, item in enumerate(attrs['handoverb']):
             wm = item["wm"]
             t_count += item["count"]
-            if wm.mgroup != attrs['send_mgroup']:
-                raise ParseError(f'{ind+1}物料与交接工段不一致')
+            # if wm.mgroup and wm.mgroup != attrs['send_mgroup']:
+            #     raise ParseError(f'第{ind+1}物料与交接工段不一致')
+            # if wm.belong_dept and wm.belong_dept != attrs['send_dept']:
+            #     raise ParseError(f'第{ind+1}物料与交接部门不一致')
             if attrs["material"] != wm.material:
                 raise ParseError(f'{ind+1}物料与交接物料不一致')
             if wm.notok_sign is not None and attrs['type'] in [Handover.H_NORMAL, Handover.H_TEST]:
@@ -734,7 +734,7 @@ class HandoverSerializer(CustomModelSerializer):
 class HandoverUpdateSerializer(CustomModelSerializer):
     class Meta:
         model = Handover
-        fields = ['id', 'send_date', 'send_user', 'count', 'count_eweight', 'recive_user']
+        fields = ['id', 'send_date', 'send_user', 'count', 'count_eweight', 'recive_user', 'note']

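Note: one subtle bug fix in this file is the switch from Mlogb.objects.create(**add_dict_2, defaults=ddict) to get_or_create: create() would have passed defaults= straight to the model constructor (presumably a TypeError, since Mlogb is unlikely to have a field literally named defaults), whereas get_or_create treats defaults as extra values applied only when a new row is inserted, which also avoids duplicate output rows on re-save. Roughly:

    # get_or_create: look up by the keyword filters, create with filters + defaults if missing.
    obj, created = Mlogb.objects.get_or_create(
        mlog=instance, batch=batch_out,
        mtask=instance.mtask, material_out=instance.material_out,
        defaults={'batch_ofrom': wm_in.batch_ofrom, 'material_ofrom': wm_in.material_ofrom})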
View File

@@ -156,10 +156,10 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
     supplier = mlog.supplier # 外协
     m_ins_list = []
     if material_in: # 需要进行车间库存管理
+        m_ins_list = []
+        m_ins_bl_list = []
         m_ins = Mlogb.objects.filter(mlog=mlog, material_in__isnull=False)
         if m_ins.exists():
-            m_ins_list = []
-            m_ins_bl_list = []
             for mi in m_ins.all():
                 m_ins_list.append((mi.material_in, mi.batch, mi.count_use, mi.wm_in))
                 if mi.count_pn_jgqbl > 0:
@@ -224,6 +224,8 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
                     notok_sign = f.name.replace('count_n_', '')
                     m_outs_list.append( (item.material_out, item.batch if item.batch else mlog.batch, getattr(item, f.name), mlog.count_real_eweight, notok_sign, item))
                     stored_notok = True
+        # 这里有一个漏洞在产出物为兄弟件的情况下不合格品的数量是记录在mlog上的
+        # 而不是mlogb上以上的额外处理就没有效果了, 不过光子不记录不合格品
     else:
         m_outs_list = [(material_out, mlog.batch, mlog.count_ok, mlog.count_real_eweight, None, mlog)]
         # 一次填写的暂时不处理不合格品
@@ -277,11 +279,11 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
     stored_mgroup = mlog.stored_mgroup
     if material_in:
         # 领用数退回
+        m_ins_list = []
+        m_ins_bl_list = []
        into_wm_mgroup = material_in.process.into_wm_mgroup if material_in.process else False
         m_ins = Mlogb.objects.filter(mlog=mlog, material_in__isnull=False)
         if m_ins.exists():
-            m_ins_list = []
-            m_ins_bl_list = []
             for mi in m_ins.all():
                 m_ins_list.append((mi.material_in, mi.batch, mi.count_use, mi.wm_in))
                 if mi.count_pn_jgqbl > 0:

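Note: in both mlog_submit and mlog_revert the initialisation of m_ins_list and m_ins_bl_list moves out of the if m_ins.exists() branch, so both lists are defined even when no Mlogb consumption rows exist; the code further down that consumes them (not shown in these hunks) would otherwise presumably hit an unbound name. The shape of the fix:

    m_ins_list = []
    m_ins_bl_list = []          # always defined now, even with no Mlogb rows
    m_ins = Mlogb.objects.filter(mlog=mlog, material_in__isnull=False)
    if m_ins.exists():
        for mi in m_ins.all():
            m_ins_list.append((mi.material_in, mi.batch, mi.count_use, mi.wm_in))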
View File

@@ -149,10 +149,10 @@ class MlogViewSet(CustomModelViewSet):
     """
     queryset = Mlog.objects.all()
     serializer_class = MlogSerializer
-    select_related_fields = ['create_by', 'update_by', 'mtask',
-                             'handle_user', 'handle_user_2', 'equipment',
-                             'equipment_2', 'material_in', 'material_out',
-                             'supplier', 'ticket', 'mgroup__process', 'test_user']
+    select_related_fields = ['create_by', 'update_by', 'mtask', 'mtaskb',
+                             'handle_user', 'handle_user_2', 'equipment', 'mgroup__belong_dept',
+                             'route__routepack', 'equipment_2', 'material_in', 'material_out',
+                             'supplier', 'ticket', 'mgroup__process', 'test_user', 'handle_leader', 'test_user']
     prefetch_related_fields = ['handle_users',
                                'material_outs', 'b_mlog', 'equipments']
     filterset_class = MlogFilter
@@ -160,17 +160,17 @@ class MlogViewSet(CustomModelViewSet):
                      'material_in__number', 'material_in__specification', 'batch', 'material_in__model',
                      'material_out__name', 'material_out__number', 'material_out__specification', 'material_out__model',]
 
-    @transaction.atomic
-    def perform_create(self, serializer):
-        ins = serializer.save()
-        data = MlogSerializer(ins).data
-        create_auditlog('create', ins, data)
+    # @transaction.atomic
+    # def perform_create(self, serializer):
+    #     ins = serializer.save()
+    #     data = MlogSerializer(ins).data
+    #     create_auditlog('create', ins, data)
 
     @transaction.atomic
     def perform_destroy(self, instance):
         if instance.submit_time is not None:
             raise ParseError('日志已提交不可变动')
-        delete_auditlog(instance, instance.id)
+        # delete_auditlog(instance, instance.id)
         instance.delete()
 
     @transaction.atomic
@@ -178,10 +178,10 @@ class MlogViewSet(CustomModelViewSet):
         ins = serializer.instance
         if ins.submit_time is not None:
             raise ParseError('该日志已提交!')
-        val_old = MlogSerializer(instance=ins).data
+        # val_old = MlogSerializer(instance=ins).data
         serializer.save()
-        val_new = MlogSerializer(instance=ins).data
-        create_auditlog('update', ins, val_new, val_old)
+        # val_new = MlogSerializer(instance=ins).data
+        # create_auditlog('update', ins, val_new, val_old)
 
     @action(methods=['post'], detail=False, perms_map={'post': 'mlog.init'}, serializer_class=MlogInitSerializer)
     def init(self, request, *args, **kwargs):
@@ -213,7 +213,6 @@ class MlogViewSet(CustomModelViewSet):
         日志提交
         """
         ins: Mlog = self.get_object()
-        vdata_old = MlogSerializer(ins).data
         now = timezone.now()
         if ins.ticket:
             raise ParseError('该日志存在审批!')
@@ -225,8 +224,8 @@ class MlogViewSet(CustomModelViewSet):
         with transaction.atomic():
             mlog_submit(ins, self.request.user, now)
             vdata_new = MlogSerializer(ins).data
-            create_auditlog('submit', ins, vdata_new,
-                            vdata_old, now, self.request.user)
+            # create_auditlog('submit', ins, vdata_new,
+            #                 vdata_old, now, self.request.user)
         MyThread(target=cal_mtask_progress_from_mlog,args=(ins,)).start()
         MyThread(target=cal_material_count_from_mlog,args=(ins,)).start()
         return Response(vdata_new)
@@ -246,8 +245,8 @@ class MlogViewSet(CustomModelViewSet):
         now = timezone.now()
         with transaction.atomic():
             mlog_revert(ins, user, now)
-            create_auditlog('revert', ins, {}, {}, now, user,
-                            request.data.get('change_reason', ''))
+            # create_auditlog('revert', ins, {}, {}, now, user,
+            #                 request.data.get('change_reason', ''))
         MyThread(target=cal_mtask_progress_from_mlog,args=(ins,)).start()
         MyThread(target=cal_material_count_from_mlog,args=(ins,)).start()
         return Response(MlogSerializer(instance=ins).data)