Compare commits

30 Commits

Author SHA1 Message Date
caoqianming d29fcce935 fix: base refine user_exist 2026-01-23 16:18:17 +08:00
caoqianming a534bde086 feat: wmaterial query by current_merged (3) 2026-01-22 16:12:35 +08:00
caoqianming 63002f27c8 feat: wmaterial query by current_merged (2) 2026-01-22 16:00:59 +08:00
caoqianming 4bbae8b7df feat: wmaterial query by current_merged 2026-01-21 11:09:53 +08:00
caoqianming dc26c7cc46 feat: handoverb add oinfo_json field 2026-01-16 15:29:58 +08:00
caoqianming 0d80e182cd feat: base user add has_perm filter 2026-01-16 14:48:08 +08:00
caoqianming 2759114ede feat: base sync database after upgrade 2026-01-16 14:42:42 +08:00
caoqianming 80f832aa85 feat: add work-section data statistics for photonics 2026-01-16 14:01:13 +08:00
caoqianming 70e49eb27e fix: batchst fix support for returning source_near 2026-01-16 14:00:52 +08:00
caoqianming e99b2ecbbc fix: base complexquerymixin support add_info_for_list 2026-01-16 14:00:11 +08:00
caoqianming 146e842642 feat: expose the with_source_near filter in swagger 2026-01-15 16:47:15 +08:00
caoqianming 47b1887c4b feat: base adjust asgi imports to ensure clean startup 2026-01-15 09:13:09 +08:00
caoqianming 1ffbe0cc44 feat: wpr return wpr_from_ 2026-01-13 15:02:52 +08:00
caoqianming 3e173f7a72 feat: base cquery support add_info_for_list 2026-01-13 14:57:13 +08:00
caoqianming fce66da1d9 feat: wpr add wpr_from filter 2026-01-13 14:15:16 +08:00
caoqianming feb8bd6770 feat: wpr_bxerp optimization 2026-01-13 10:29:41 +08:00
caoqianming 43f5f11ca8 feat: trigger per-unit statistics even without an ftest 2026-01-13 09:05:19 +08:00
caoqianming d5ea72a021 feat: handover record items must share the same section/workshop (2) 2026-01-12 15:44:48 +08:00
caoqianming 143d9cb719 fix: base locked_get_or_create optimization 2026-01-12 15:30:55 +08:00
caoqianming cf6633592a feat: handover record items must share the same section/workshop 2026-01-12 13:44:08 +08:00
caoqianming b39b0e7923 fix: bug in mlog concurrency optimization 2026-01-12 13:27:29 +08:00
caoqianming 70563a6c02 feat: mlog concurrency optimization 2026-01-12 11:16:04 +08:00
caoqianming def22f6b18 feat: handover can list records handed over only to a workshop 2026-01-12 10:28:51 +08:00
caoqianming f9eee5a523 feat: handover_revert concurrency optimization 2026-01-12 10:21:15 +08:00
caoqianming 2ecaeadff7 feat: handover_submit concurrency optimization (2) 2026-01-09 16:59:53 +08:00
caoqianming 6eee0e1e53 feat: handover_submit concurrency optimization 2026-01-09 16:54:24 +08:00
caoqianming 3417515e72 feat: base add locked_get_or_create 2026-01-09 16:53:57 +08:00
caoqianming 43abcbaa48 feat: switch -n batch query from regex to LIKE for performance 2026-01-09 15:55:56 +08:00
caoqianming e2a92b6faa feat: pin dependency versions 2026-01-08 10:40:00 +08:00
caoqianming 02e3265133 feat: upgrade dependencies 2026-01-08 09:59:39 +08:00
21 changed files with 616 additions and 187 deletions

View File

@ -11,7 +11,7 @@ router.register('question', QuestionViewSet, basename='question')
router.register('paper', PaperViewSet, basename='paper')
router.register('exam', ExamViewSet, basename='exam')
router.register('examrecord', ExamRecordViewSet, basename='examrecord')
router.register('training', TrainRecordViewSet, basename='examrecord')
router.register('training', TrainRecordViewSet, basename='training')
urlpatterns = [
path(API_BASE_URL, include(router.urls)),
]

View File

@ -13,10 +13,10 @@ router.register('warehouse', WarehouseVIewSet, basename='warehouse')
router.register('materialbatch', MaterialBatchViewSet,
basename='materialbatch')
router.register('mio', MIOViewSet, basename='mio')
router.register('mio/do', MioDoViewSet)
router.register('mio/sale', MioSaleViewSet)
router.register('mio/pur', MioPurViewSet)
router.register('mio/other', MioOtherViewSet)
router.register('mio/do', MioDoViewSet, basename='mio_do')
router.register('mio/sale', MioSaleViewSet, basename='mio_sale')
router.register('mio/pur', MioPurViewSet, basename='mio_pur')
router.register('mio/other', MioOtherViewSet, basename='mio_other')
router.register('mioitem', MIOItemViewSet, basename='mioitem')
router.register('mioitemw', MIOItemwViewSet, basename='mioitemw')
# router.register('pack', PackViewSet, basename='pack')

View File

@ -7,6 +7,7 @@ from rest_framework.exceptions import ParseError
class UserFilterSet(filters.FilterSet):
ubelong_dept__name = filters.CharFilter(label='归属于该部门及以下(按名称)', method='filter_ubelong_dept__name')
ubelong_dept = filters.CharFilter(label='归属于该部门及以下', method='filter_ubelong_dept')
has_perm = filters.CharFilter(label='拥有指定权限标识', method='filter_has_perm')
class Meta:
model = User
@ -37,6 +38,9 @@ class UserFilterSet(filters.FilterSet):
except Exception as e:
raise ParseError(f"部门ID错误: {value} {str(e)}")
return queryset.filter(belong_dept__in=depts)
def filter_has_perm(self, queryset, name, value):
return queryset.filter(up_user__post__pr_post__role__perms__codes__contains=value)
class DeptFilterSet(filters.FilterSet):
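The new has_perm filter reaches permission codes through the reverse names added in this change set (pr_post on PostRole.post, pr_role on PostRole.role). A minimal sketch of the same traversal as a standalone query, assuming the User model is importable from apps.system.models; the permission code is illustrative:

from apps.system.models import User

# user -> UserPost (up_user) -> Post -> PostRole (pr_post) -> Role -> perms -> codes
perm_code = "mlog_view"  # illustrative permission code, not defined in this diff
users = User.objects.filter(
    up_user__post__pr_post__role__perms__codes__contains=perm_code
).distinct()  # the reverse joins can duplicate rows, so deduplicate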

View File

@ -0,0 +1,120 @@
# Generated by Django 4.2.27 on 2026-01-16 06:41
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('system', '0006_auto_20241213_1249'),
]
operations = [
migrations.AlterField(
model_name='dept',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='dept',
name='third_info',
field=models.JSONField(blank=True, default=dict, verbose_name='三方系统信息'),
),
migrations.AlterField(
model_name='dept',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='dictionary',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='dictionary',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='dicttype',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='dicttype',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='file',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='file',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='myschedule',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='myschedule',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='post',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='post',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='postrole',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pr_post', to='system.post', verbose_name='关联岗位'),
),
migrations.AlterField(
model_name='postrole',
name='role',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pr_role', to='system.role', verbose_name='关联角色'),
),
migrations.AlterField(
model_name='role',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='role',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='user',
name='belong_dept',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_belong_dept', to='system.dept', verbose_name='所属部门'),
),
migrations.AlterField(
model_name='user',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='user',
name='roles',
field=models.ManyToManyField(blank=True, to='system.role', verbose_name='关联角色'),
),
migrations.AlterField(
model_name='user',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
]

View File

@ -54,7 +54,7 @@ class Dept(ParentModel, CommonAModel):
name = models.CharField('名称', max_length=60)
type = models.CharField('类型', max_length=20, default='dept')
sort = models.PositiveSmallIntegerField('排序标记', default=1)
third_info = models.JSONField('三方系统信息', default=dict)
third_info = models.JSONField('三方系统信息', default=dict, blank=True)
class Meta:
verbose_name = '部门'
@ -107,9 +107,9 @@ class PostRole(BaseModel):
data_range = models.PositiveSmallIntegerField('数据权限范围', choices=DataFilter.choices,
default=DataFilter.THISLEVEL_AND_BELOW)
post = models.ForeignKey(Post, verbose_name='关联岗位',
on_delete=models.CASCADE)
on_delete=models.CASCADE, related_name="pr_post")
role = models.ForeignKey(Role, verbose_name='关联角色',
on_delete=models.CASCADE)
on_delete=models.CASCADE, related_name='pr_role')
class SoftDeletableUserManager(SoftDeletableManagerMixin, UserManager):
@ -132,7 +132,7 @@ class User(AbstractUser, CommonBModel):
posts = models.ManyToManyField(
Post, through='system.userpost', related_name='user_posts')
depts = models.ManyToManyField(Dept, through='system.userpost')
roles = models.ManyToManyField(Role, verbose_name='关联角色')
roles = models.ManyToManyField(Role, verbose_name='关联角色', blank=True)
# 关联账号
secret = models.CharField('密钥', max_length=100, null=True, blank=True)

View File

@ -322,7 +322,7 @@ def phone_exist(phone):
def user_exist(username):
if User.objects.filter(username=username).exists():
if User.objects.get_queryset(all=True).filter(username=username).exists():
raise serializers.ValidationError(**USERNAME_EXIST)
return username

View File

@ -300,9 +300,15 @@ class ComplexQueryMixin:
page = self.paginate_queryset(new_qs)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
rdata = serializer.data
if hasattr(self, 'add_info_for_list'):
rdata = self.add_info_for_list(rdata)
return self.get_paginated_response(rdata)
serializer = self.get_serializer(new_qs, many=True)
return Response(serializer.data)
rdata = serializer.data
if hasattr(self, 'add_info_for_list'):
rdata = self.add_info_for_list(rdata)
return Response(rdata)
class MyLoggingMixin(object):
"""Mixin to log requests"""

View File

@ -150,7 +150,33 @@ class BaseModel(models.Model):
raise
time.sleep(0.1 * (attempt + 1))
@classmethod
def locked_get_or_create(cls, defaults: dict, **kwargs):
"""
仅用于事务内
并发安全的 get_or_create
"""
if not connection.in_atomic_block:
raise RuntimeError("locked_get_or_create 必须在事务中调用")
defaults = defaults or {}
qs = cls.objects.select_for_update().filter(**kwargs)
cnt = qs.count()
if cnt > 1:
raise RuntimeError(
f"{cls.__name__} 数据异常:定位条件 {kwargs} 命中 {cnt}"
)
if cnt == 1:
return qs.get(), False
params = {**kwargs, **defaults}
obj = cls.objects.create(**params)
return obj, True
def handle_parent(self):
pass
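locked_get_or_create takes row locks via select_for_update on the lookup, so it must run inside an open transaction and raises RuntimeError otherwise. A minimal usage sketch mirroring how the services below call it on WMaterial; the wrapper function and its arguments are illustrative:

from django.db import transaction
from apps.wpm.models import WMaterial

def get_or_create_stock(batch, material, mgroup, belong_dept):
    """Illustrative wrapper around the new concurrency-safe get_or_create."""
    with transaction.atomic():
        wm, created = WMaterial.locked_get_or_create(
            defaults={"belong_dept": belong_dept},  # applied only when creating
            batch=batch, material=material,         # lookup kwargs, matched under row locks
            mgroup=mgroup, state=WMaterial.WM_OK,
        )
        return wm, created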

View File

@ -1,8 +1,8 @@
from django_filters import rest_framework as filters
from apps.wpm.models import (SfLog, StLog, WMaterial, Mlog, Mlogbw,
Handover, Mgroup, Mlogb, Mtask, BatchSt)
Handover, Mgroup, Mlogb, Mtask, BatchSt, Handoverb)
from apps.mtm.models import Route, Material
from django.db.models import Q
from django.db.models import Q, Exists, OuterRef
from rest_framework.exceptions import ParseError
from datetime import datetime
@ -43,6 +43,7 @@ class WMaterialFilter(filters.FilterSet):
material__process__exclude = filters.CharFilter(field_name="material__process", lookup_expr="exact", exclude=True)
mlog_date_start = filters.DateFilter(label="产出开始", method="filter_mlog_date_start")
mlog_date_end = filters.DateFilter(label="产出结束", method="filter_mlog_date_end")
current_merged = filters.BooleanFilter(label="是否本工段新合成的批", method="filter_current_merged")
def filter_mlog_date_start(self, queryset, name, value):
mgroupId = self.data.get("mgroup", None)
@ -101,6 +102,18 @@ class WMaterialFilter(filters.FilterSet):
raise ParseError('生产路线不存在!')
return queryset.filter(material=route.material_in)|queryset.filter(material__in=route.materials.all())
def filter_current_merged(self, queryset, name, value):
sub_qs = Handoverb.objects.filter(
wm_to=OuterRef("pk"),
handover__mtype=Handover.H_MERGE,
handover__submit_time__isnull=False
)
if value is True:
return queryset.annotate(_has_merge=Exists(sub_qs)).filter(_has_merge=True)
elif value is False:
return queryset.annotate(_has_merge=Exists(sub_qs)).filter(_has_merge=False)
return queryset
class Meta:
model = WMaterial
fields = {
@ -154,10 +167,16 @@ class MlogFilter(filters.FilterSet):
class HandoverFilter(filters.FilterSet):
cbatch = filters.CharFilter(label='批次号', method='filter_cbatch')
mgroup = filters.CharFilter(label='MgroupId', method='filter_mgroup')
mgroupx = filters.CharFilter(label='MgroupId', method='filter_mgroupx')
dept = filters.CharFilter(label='DeptId', method='filter_dept')
def filter_mgroup(self, queryset, name, value):
return queryset.filter(send_mgroup__id=value)|queryset.filter(recive_mgroup__id=value)
def filter_mgroupx(self, queryset, name, value):
dept = Mgroup.objects.get(id=value).belong_dept
return (queryset.filter(send_mgroup__id=value)|queryset.filter(recive_mgroup__id=value)|
queryset.filter(send_dept=dept, send_mgroup__isnull=True)|queryset.filter(recive_dept=dept, recive_mgroup__isnull=True))
def filter_dept(self, queryset, name, value):
return queryset.filter(send_dept__id=value)|queryset.filter(recive_dept__id=value)
@ -223,7 +242,11 @@ class MlogbFilter(filters.FilterSet):
class BatchStFilter(filters.FilterSet):
batch__startswith__in = filters.CharFilter(method='filter_batch')
data__has_key = filters.CharFilter(method='filter_data')
with_source_near = filters.CharFilter(label='来源', method='filter_source_near')
def filter_source_near(self, queryset, name, value):
return queryset
def filter_data(self, queryset, name, value):
return queryset.filter(data__has_key=value)
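Of the filter additions above, current_merged carries the least obvious semantics: it flags WMaterial rows that a submitted merge-type Handoverb points at via wm_to. A minimal sketch of exercising it through the filterset; the filters module path is an assumption:

from apps.wpm.filters import WMaterialFilter
from apps.wpm.models import WMaterial

# Only batches newly merged in the current work section.
merged_only = WMaterialFilter(
    data={"current_merged": "true"},
    queryset=WMaterial.objects.all(),
).qs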

View File

@ -0,0 +1,117 @@
# Generated by Django 4.2.27 on 2026-01-16 07:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('system', '0007_alter_dept_create_by_alter_dept_third_info_and_more'),
('wpm', '0126_auto_20251208_1337'),
]
operations = [
migrations.AddField(
model_name='handoverb',
name='oinfo_json',
field=models.JSONField(blank=True, default=dict, verbose_name='其他信息'),
),
migrations.AlterField(
model_name='attlog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='attlog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='fmlog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='fmlog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='handover',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='handover',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='mlog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='mlog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='otherlog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='otherlog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='sflog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='sflog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='sflogexp',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='sflogexp',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='stlog',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='stlog',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
migrations.AlterField(
model_name='wmaterial',
name='belong_dept',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_belong_dept', to='system.dept', verbose_name='所属部门'),
),
migrations.AlterField(
model_name='wmaterial',
name='create_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_create_by', to=settings.AUTH_USER_MODEL, verbose_name='创建人'),
),
migrations.AlterField(
model_name='wmaterial',
name='update_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_update_by', to=settings.AUTH_USER_MODEL, verbose_name='最后编辑人'),
),
]

View File

@ -14,9 +14,11 @@ from django.db.models import Count
from django.db import transaction
from django.db.models import Max
import re
from django.db.models import Q
from django.db.models import Q, F
import django.utils.timezone as timezone
from apps.utils.sql import query_all_dict
import logging
myLogger = logging.getLogger('log')
# Create your models here.
class SfLog(CommonADModel):
@ -125,6 +127,10 @@ class WMaterial(CommonBDModel):
material_ofrom = models.ForeignKey(Material, verbose_name='原料物料', on_delete=models.SET_NULL, null=True, blank=True, related_name='wm_mofrom')
number_from = models.TextField("来源于个号", null=True, blank=True)
@property
def belong_dept_or_mgroup_id(self):
return self.mgroup.id if self.mgroup else self.belong_dept.id
@property
def count_working(self):
return Mlogb.objects.filter(wm_in=self, mlog__submit_time__isnull=True).aggregate(count=Sum('count_use'))['count'] or 0
@ -161,6 +167,30 @@ class WMaterial(CommonBDModel):
),
state__in=[WMaterial.WM_OK, WMaterial.WM_REPAIR]
)
@classmethod
def increase(cls, wm_id: str, user:User, count, count_eweight=None):
updates = {}
if count:
updates['count'] = F('count') + count
if count_eweight:
updates['count_eweight'] = count_eweight
if not updates:
return 0
updates["update_by"] = user
updates['update_time'] = timezone.now()
return cls.objects.filter(id=wm_id).update(**updates)
@classmethod
def decrease(cls, wm_id: str, user:User, count):
if not count:
return 0
updated = cls.objects.filter(id=wm_id, count__gte= count).update(
count=F('count') - count, update_by=user, update_time=timezone.now())
if updated == 0:
batch = WMaterial.objects.get(id=wm_id).batch
raise ParseError(f'{batch}_库存不足无法完成扣减')
return updated
class Fmlog(CommonADModel):
"""TN: 父级生产日志
@ -639,6 +669,7 @@ class Handoverb(BaseModel):
wm_to = models.ForeignKey(WMaterial, verbose_name='所到车间库存', on_delete=models.SET_NULL,
null=True, blank=True, related_name='handoverb_wm_to')
count = models.DecimalField('送料数', default=0, max_digits=11, decimal_places=1)
oinfo_json = models.JSONField('其他信息', default=dict, blank=True)
@property
def handoverbw(self):
@ -837,41 +868,46 @@ class BatchLog(BaseModel):
@classmethod
def batches_to(cls, batch:str):
# query = """
# SELECT batch FROM wpm_batchst
# WHERE batch ~ %s
# """
query = """
SELECT batch
FROM wpm_batchst
WHERE batch ~ %s
ORDER BY
-- 先按前缀部分排序例如 'A'
SUBSTRING(batch FROM '^(.*)-') DESC,
-- 再按后缀的数值部分排序 '2', '11' 转为整数
CAST(SUBSTRING(batch FROM '-([0-9]+)$') AS INTEGER) DESC
""" # 排序可在sql层处理
query_ = """SELECT batch FROM wpm_batchst WHERE batch ~ %s"""
pattern = f'^{batch}-[0-9]+$'
SELECT
batch,
CAST(substring(batch FROM LENGTH(%s) + 2) AS INTEGER) AS batch_num
FROM wpm_batchst
WHERE batch LIKE %s AND translate(
substring(batch FROM LENGTH(%s) + 2),
'0123456789',
''
) = ''
ORDER BY batch_num DESC
"""
"""可以用如下方法直接查询
"""
# batches = BatchLog.objects.filter(source__batch=batch, relation_type="split").values_list("target__batch", flat=True).distinct()
# batches = sorted(list(batches), key=custom_key)
batches_r = query_all_dict(query_, params=(pattern,))
batches = [b["batch"] for b in batches_r]
batches = sorted(list(batches), key=custom_key)
last_batch_num = None
if batches:
last_batch = batches[-1]
last_batch_list = last_batch.split("-")
if last_batch_list:
try:
last_batch_num = int(last_batch_list[-1])
except Exception:
pass
return {"batches": batches, "last_batch_num": last_batch_num, "last_batch": last_batch}
return {"batches": [], "last_batch_num": None, "last_batch": None}
prefix = batch
params = (
prefix,
f"{prefix}-%",
prefix
)
try:
rows = query_all_dict(query, params=params)
except Exception as e:
myLogger.error(f"BatchLog.batches_to error: {(str(e), query, params)}")
raise
if not rows:
return {
"batches": [],
"last_batch_num": None,
"last_batch": None,
}
batches = [r["batch"] for r in rows]
last = rows[0]
return {
"batches": batches,
"last_batch_num": last["batch_num"],
"last_batch": last["batch"],
}
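The new increase() and decrease() classmethods route all count changes through a single UPDATE with an F() expression, and decrease() additionally filters on count__gte so stock cannot go negative under concurrency (it raises ParseError instead). A minimal sketch of moving stock between two rows; the helper function and its arguments are illustrative:

from django.db import transaction
from apps.wpm.models import WMaterial

def move_stock(wm_from_id, wm_to_id, user, count):
    """Illustrative: shift `count` units from one WMaterial row to another."""
    with transaction.atomic():
        # one UPDATE ... SET count = count - %s WHERE count >= %s; ParseError on shortfall
        WMaterial.decrease(wm_id=wm_from_id, user=user, count=count)
        # one UPDATE ... SET count = count + %s, plus update_by / update_time bookkeeping
        WMaterial.increase(wm_id=wm_to_id, user=user, count=count)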

View File

@ -292,7 +292,7 @@ def main(batch: str, mgroup_obj=None):
data["六车间交接领料_接料人"] = ";".join([item.name for item in data["六车间交接领料_接料人"]])
# 六车间工段生产数据
mgroup_list = ["平头", "粘铁头", "粗中细磨", "平磨", "掏管", "抛光", "开槽", "倒角"]
mgroup_list = ["平头", "粘铁头", "粗中细磨", "平磨", "掏管", "抛光", "开槽", "倒角", "加工前检验", "中检"]
for mgroup_name in mgroup_list:
if mgroup_name == '粗中细磨':
mgroups = Mgroup.objects.filter(name__in=['粗磨', '粗中磨', '粗中细磨'])

View File

@ -1,36 +1,67 @@
from apps.wpmw.models import Wpr
from apps.wpm.models import Mlogbw
from apps.qm.models import Ftest, FtestDefect, FtestItem
from apps.wpm.models import Mlogbw, Mlog, MlogUser
from apps.qm.models import Ftest, FtestDefect, FtestItem, TestItem
from rest_framework.exceptions import ParseError
from apps.mtm.models import Mgroup
def main(wprId, mgroup:Mgroup):
def main(wprId, mgroup:Mgroup=None):
wpr = Wpr.objects.get(id=wprId)
if mgroup is None:
mgroup_ids = Mlogbw.objects.filter(
wpr=wpr,
mlogb__mlog__submit_time__isnull=False,
mlogb__mlog__is_fix=False
).values_list(
'mlogb__mlog__mgroup',
flat=True
).distinct()
mgroups = Mgroup.objects.filter(id__in=mgroup_ids)
else:
mgroups = [mgroup]
data = {}
mgroup_name = mgroup.name
mlogbw = Mlogbw.objects.filter(wpr=wpr, mlogb__mlog__mgroup=mgroup, mlogb__mlog__submit_time__isnull=False).order_by("-update_time").first()
if mlogbw:
data[f"{mgroup_name}_批次号"] = mlogbw.mlogb.batch
data[f"{mgroup_name}_日期"] = mlogbw.mlogb.mlog.handle_date.strftime("%Y-%m-%d")
ftestitems = FtestItem.objects.filter(ftest__mlogbw_ftest__wpr=wpr,
for mgroup in mgroups:
mgroup_name = mgroup.name
mlogbw = Mlogbw.objects.filter(wpr=wpr,
mlogb__mlog__mgroup=mgroup,
mlogb__mlog__submit_time__isnull=False, mlogb__mlog__is_fix=False).order_by("-update_time").first()
if mlogbw:
mlog:Mlog = mlogbw.mlogb.mlog
data[f"{mgroup_name}_批次号"] = mlogbw.mlogb.batch
data[f"{mgroup_name}_设备编号"] = mlog.equipment.number if mlog.equipment else None
data[f"{mgroup_name}_操作人"] = mlog.handle_user.name if mlog.handle_user else None
data[f"{mgroup_name}_日期"] = mlog.handle_date.strftime("%Y-%m-%d")
# 日志操作数据
if mlog.oinfo_json:
oinfo_keys = list(mlog.oinfo_json.keys())
oinfo_keys_qs = TestItem.objects.filter(id__in=oinfo_keys)
for item in oinfo_keys_qs:
data[f"{mgroup_name}_操作项_{item.name}"] = mlog.oinfo_json[item.id]
# 子工序操作人和日期
mlogusers = MlogUser.objects.filter(mlog=mlog)
if mlogusers.exists():
datab = mlogusers.values("handle_user__name", "process__name", "handle_date")
for ind, item in enumerate(datab):
data[f"{mgroup_name}_{item['process__name']}_操作人"] = item["handle_user__name"]
data[f"{mgroup_name}_{item['process__name']}_日期"] = item["handle_date"].strftime("%Y-%m-%d")
# 检测数据
ftestitems = FtestItem.objects.filter(ftest__mlogbw_ftest__wpr=wpr,
ftest__mlogbw_ftest__mlogb__mlog__mgroup=mgroup,
ftest__mlogbw_ftest__mlogb__mlog__submit_time__isnull=False,
ftest__mlogbw_ftest__mlogb__mlog__is_fix=False)
for ftestitem in ftestitems:
data[f"{mgroup_name}_检测项_{ftestitem.testitem.name}"] = ftestitem.test_val_json
ftestdefects = FtestDefect.objects.filter(ftest__mlogbw_ftest__wpr=wpr,
ftest__mlogbw_ftest__mlogb__mlog__mgroup=mgroup,
ftest__mlogbw_ftest__mlogb__mlog__submit_time__isnull=False,
ftest__mlogbw_ftest__mlogb__mlog__is_fix=False)
for ftestitem in ftestitems:
data[f"{mgroup_name}_检测项_{ftestitem.testitem.name}"] = ftestitem.test_val_json
ftestdefects = FtestDefect.objects.filter(ftest__mlogbw_ftest__wpr=wpr,
ftest__mlogbw_ftest__mlogb__mlog__mgroup=mgroup,
ftest__mlogbw_ftest__mlogb__mlog__submit_time__isnull=False,
ftest__mlogbw_ftest__mlogb__mlog__is_fix=False)
for ftestdefect in ftestdefects:
data[f"{mgroup_name}_缺陷项_{ftestdefect.defect.name}"] = 1 if ftestdefect.has is True else 0
old_data:dict = wpr.data
if old_data:
for item in list(old_data.keys()):
if f'{mgroup_name}_' in item:
del old_data[item]
old_data.update(data)
wpr.data = old_data
wpr.save(update_fields=["data"])
for ftestdefect in ftestdefects:
data[f"{mgroup_name}_缺陷项_{ftestdefect.defect.name}"] = 1 if ftestdefect.has is True else 0
old_data:dict = wpr.data
if old_data:
for item in list(old_data.keys()):
if f'{mgroup_name}_' in item:
del old_data[item]
old_data.update(data)
wpr.data = old_data
wpr.save(update_fields=["data"])

View File

@ -1261,12 +1261,18 @@ class HandoverSerializer(CustomModelSerializer):
next_mat = new_wm.material
next_state = new_wm.state
next_defect = new_wm.defect
deptOrmgroupId = None
for ind, item in enumerate(attrs['handoverb']):
if item["count"] > 0:
pass
else:
raise ParseError(f'{ind+1}行-交接数量必须大于0')
wm = item["wm"]
wm: WMaterial = item["wm"]
current_mdept_id = wm.belong_dept_or_mgroup_id
if deptOrmgroupId is None:
deptOrmgroupId = current_mdept_id
elif deptOrmgroupId != current_mdept_id:
raise ParseError(f'{ind+1}行-交接物料所属工段/车间不一致')
if mtype == Handover.H_MERGE:
if next_mat is None:
next_mat = wm.material

View File

@ -22,7 +22,7 @@ from ..qm.models import Defect, Ftest
from django.db.models import Count, Q
from apps.utils.tasks import ctask_run
from apps.mtm.models import Process
from apps.utils.lock import lock_model_record_d_func
from django.db.models import F
myLogger = logging.getLogger('log')
@ -150,11 +150,12 @@ def get_pcoal_heat(year_s: int, month_s: int, day_s: int):
myLogger.error(f'获取煤粉热值失败,{e}, {year_s}, {month_s}, {day_s}', exc_info=True)
return 25000
# @lock_model_record_d_func(Mlog)
def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
"""
生产日志提交后需要执行的操作
"""
mlog = Mlog.objects.select_for_update().get(id=mlog.id)
if mlog.work_start_time and mlog.work_start_time > timezone.now():
raise ParseError('操作开始时间不能晚于当前时间')
if mlog.work_start_time and mlog.work_end_time and mlog.work_end_time < mlog.work_start_time:
@ -223,21 +224,21 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
# 需要判断领用数是否合理
# 优先使用工段库存
if isinstance(mlog_or_b, Mlogb) and mlog_or_b.wm_in:
wm_qs = WMaterial.objects.filter(id=mlog_or_b.wm_in.id)
wm = WMaterial.objects.select_for_update().get(id=mlog_or_b.wm_in.id)
else:
wm_qs = WMaterial.objects.filter(batch=mi_batch, material=mi_ma, mgroup=mgroup, state=WMaterial.WM_OK)
if not wm_qs.exists():
wm_qs = WMaterial.objects.filter(batch=mi_batch, material=mi_ma,
belong_dept=belong_dept, mgroup=None, state=WMaterial.WM_OK)
count_x = wm_qs.count()
if count_x == 1:
wm = wm_qs.first()
elif count_x == 0:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-批次库存不存在!')
else:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-存在多个相同批次!')
count_x = wm_qs.count()
if count_x == 1:
wm = WMaterial.objects.select_for_update().get(id=wm_qs.first().id)
elif count_x == 0:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-批次库存不存在!')
else:
raise ParseError(
f'{str(mi_ma)}-{mi_batch}-存在多个相同批次!')
if mi_count > wm.count:
raise ParseError(
@ -260,7 +261,7 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
if count <= 0:
raise ParseError('存在非正数!')
lookup = {'batch': batch, 'material': material, 'mgroup': mgroup, 'defect': defect, 'state': WMaterial.WM_NOTOK}
wm, is_create = WMaterial.objects.get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm, is_create = WMaterial.locked_get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm.count = wm.count + count
if is_create:
wm.create_by = user
@ -343,7 +344,7 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
else:
lookup['belong_dept'] = belong_dept
wm, is_create2 = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm, is_create2 = WMaterial.locked_get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm.count = wm.count + mo_count
wm.count_eweight = mo_count_eweight
wm.update_by = user
@ -401,16 +402,17 @@ def mlog_submit(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
ana_batch_thread(xbatchs=xbatches, mgroup=mlog.mgroup)
# 触发单个统计
wprIds = list(Mlogbw.objects.filter(mlogb__mlog=mlog, ftest__isnull=False, wpr__isnull=False).values_list('wpr__id', flat=True))
wprIds = list(Mlogbw.objects.filter(mlogb__mlog=mlog, wpr__isnull=False).values_list('wpr__id', flat=True))
if wprIds:
ana_wpr_thread(wprIds, mlog.mgroup)
# @lock_model_record_d_func(Mlog)
def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
"""日志撤回
"""
# if mlog.submit_time is None:
# return
mlog = Mlog.objects.select_for_update().get(id=mlog.id)
if now is None:
now = timezone.now()
@ -506,6 +508,8 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
else:
raise ParseError(
f'{str(mo_ma)}-{mo_batch}-存在多个相同批次!')
wm = WMaterial.objects.select_for_update().get(id=wm.id)
wm.count = wm.count - mo_count
if wm.count < 0:
raise ParseError(f'{wm.batch} 车间库存不足, 产物无法回退')
@ -547,7 +551,7 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
if mi_count <= 0:
raise ParseError('存在非正数!')
if isinstance(mlog_or_b, Mlogb) and mlog_or_b.wm_in:
wm = WMaterial.objects.get(id=mlog_or_b.wm_in.id)
wm = WMaterial.objects.select_for_update().get(id=mlog_or_b.wm_in.id)
else:
# 针对光子的情况实际上必须需要wm_in
lookup = {'batch': mi_batch, 'material': mi_ma, 'mgroup': None, 'state': WMaterial.WM_OK}
@ -557,7 +561,7 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
else:
lookup['belong_dept'] = belong_dept
wm, _ = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm, _ = WMaterial.locked_get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm.count = wm.count + mi_count
wm.update_by = user
wm.save()
@ -579,7 +583,7 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
lookup['mgroup'] = mgroup
else:
lookup['belong_dept'] = belong_dept
wm, is_create = WMaterial.objects.get_or_create(**lookup, defaults={**lookup, "belong_dept": belong_dept})
wm, is_create = WMaterial.locked_get_or_create(**lookup, defaults={"belong_dept": belong_dept})
wm.count = wm.count - count
if wm.count < 0:
raise ParseError('加工前不良数量大于库存量')
@ -619,7 +623,7 @@ def mlog_revert(mlog: Mlog, user: User, now: Union[datetime.datetime, None]):
ana_batch_thread(xbatches, mgroup=mlog.mgroup)
# 触发单个统计
wprIds = list(Mlogbw.objects.filter(mlogb__mlog=mlog, ftest__isnull=False, wpr__isnull=False).values_list('wpr__id', flat=True))
wprIds = list(Mlogbw.objects.filter(mlogb__mlog=mlog, wpr__isnull=False).values_list('wpr__id', flat=True))
if wprIds:
ana_wpr_thread(wprIds, mlog.mgroup)
@ -696,11 +700,15 @@ def update_mtask(mtask: Mtask, fill_way: int = 10):
# utask.state = Utask.UTASK_SUBMIT
utask.save()
@lock_model_record_d_func(Handover)
def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime, None]):
"""
交接提交后需要执行的操作
"""
handover = (
Handover.objects
.select_for_update()
.get(pk=handover.pk)
)
if handover.submit_time is not None:
return
now = timezone.now()
@ -744,7 +752,11 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
wmId, xcount, handover_or_b = item
if xcount <= 0:
raise ParseError("存在非正数!")
wm_from = WMaterial.objects.get(id=wmId)
wm_from = (
WMaterial.objects
.select_for_update()
.get(id=wmId)
)
mids.append(wm_from.material.id)
# 合并为新批
@ -768,25 +780,17 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
batch = wm_from.batch
batches.append(batch)
if wm_from is None:
raise ParseError(f'{wm_from.batch} 找不到车间库存')
count_x = wm_from.count - xcount
if count_x < 0:
raise ParseError(f'{wm_from.batch} 车间库存不足!')
else:
wm_from.count = count_x
wm_from.save()
WMaterial.decrease(wm_id=wm_from.id, user=user, count=xcount)
if need_add:
# 开始变动
if handover.type == Handover.H_NORMAL:
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
wm_to = WMaterial.objects.select_for_update().get(id=handover.new_wm.id)
if wm_to.state != wm_from.state or wm_to.material != wm_from.material or wm_to.defect != wm_from.defect:
raise ParseError("正常合并到的车间库存状态或物料异常")
else:
wm_to, _ = WMaterial.objects.get_or_create(
wm_to, _ = WMaterial.locked_get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
@ -806,11 +810,11 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
recive_mgroup = handover.recive_mgroup
wm_state = WMaterial.WM_REPAIR
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
wm_to = WMaterial.objects.select_for_update().get(id=handover.new_wm.id)
if wm_to.state != WMaterial.WM_REPAIR or wm_to.material != wm_from.material or wm_to.defect != wm_from.defect:
raise ParseError("返修合并到的车间库存状态或物料异常")
elif recive_mgroup:
wm_to, _ = WMaterial.objects.get_or_create(
wm_to, _ = WMaterial.locked_get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
@ -828,28 +832,13 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
)
else:
raise ParseError("返工交接必须指定接收工段")
elif handover.type == Handover.H_TEST:
raise ParseError("检验交接已废弃")
wm_to, _ = WMaterial.objects.get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
state=WMaterial.WM_TEST,
belong_dept=recive_dept,
defaults={
"count_xtest": 0,
"batch_ofrom": wm_from.batch_ofrom,
"material_ofrom": wm_from.material_ofrom,
"create_by": user
},
)
elif handover.type == Handover.H_SCRAP:
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
wm_to = WMaterial.objects.select_for_update().get(id=handover.new_wm.id)
if wm_to.state != WMaterial.WM_SCRAP or wm_to.material != wm_from.material or wm_to.defect != wm_from.defect:
raise ParseError("报废合并到的车间库存状态或物料异常")
elif recive_mgroup:
wm_to, _ = WMaterial.objects.get_or_create(
wm_to, _ = WMaterial.locked_get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
@ -868,11 +857,11 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
raise ParseError("不支持非工段报废")
elif handover.type == Handover.H_CHANGE:
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
wm_to = WMaterial.objects.select_for_update().get(id=handover.new_wm.id)
if wm_to.material != handover.material_changed or wm_to.state != handover.state_changed:
raise ParseError("改版合并到的车间库存状态或物料异常")
elif handover.recive_mgroup:
wm_to, _ = WMaterial.objects.get_or_create(
wm_to, _ = WMaterial.locked_get_or_create(
batch=batch,
material=handover.material_changed,
state=handover.state_changed,
@ -895,9 +884,9 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
if wm_from and wm_from.state != WMaterial.WM_OK:
raise ParseError("仅合格品支持退回")
if mtype == Handover.H_MERGE and handover.new_wm:
wm_to = handover.new_wm
wm_to = WMaterial.objects.select_for_update().get(id=handover.new_wm.id)
else:
wm_to, _ = WMaterial.objects.get_or_create(
wm_to, _ = WMaterial.locked_get_or_create(
batch=batch,
material=material,
mgroup=recive_mgroup,
@ -915,9 +904,7 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
else:
raise ParseError("不支持该交接类型")
wm_to.count = wm_to.count + xcount
wm_to.count_eweight = handover.count_eweight # 这行代码有隐患
wm_to.save()
WMaterial.increase(wm_id=wm_to.id, user=user,count=xcount, count_eweight=handover.count_eweight if handover.count_eweight else None)
handover_or_b.wm_to = wm_to
handover_or_b.save()
if material.tracking == Material.MA_TRACKING_SINGLE:
@ -932,7 +919,8 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
for item in handoverbws:
wpr:Wpr = item.wpr
Wpr.change_or_new(wpr=wpr, wm=wm_to, old_wm=wpr.wm, old_mb=wpr.mb)
if wm_to.count != Wpr.objects.filter(wm=wm_to).count():
db_count = WMaterial.objects.filter(id=wm_to.id).values_list("count", flat=True).get()
if db_count != Wpr.objects.filter(wm=wm_to).count():
raise ParseError("交接与明细数量不一致2,操作失败")
handover.submit_user = user
@ -943,8 +931,8 @@ def handover_submit(handover:Handover, user: User, now: Union[datetime.datetime,
ana_batch_thread(xbatchs=batches)
@lock_model_record_d_func(Handover)
def handover_revert(handover:Handover, handler:User=None):
handover = Handover.objects.select_for_update().get(id=handover.id)
if handover.submit_time is None:
raise ParseError('该交接单未提交!')
ticket:Ticket = handover.ticket
@ -976,16 +964,13 @@ def handover_revert(handover:Handover, handler:User=None):
# 此时是自己交给自己,不需要做任何操作
pass
else:
wm.count = wm.count + item.count
wm.save()
wm_to.count = wm_to.count - item.count
if wm_to.count < 0:
raise ParseError('库存不足无法撤回!')
wm_to.save()
WMaterial.increase(wm_id=wm.id, user=handler, count=item.count)
WMaterial.decrease(wm_id=wm_to.id, user=handler, count=item.count)
if material.tracking == Material.MA_TRACKING_SINGLE:
handoverbws = Handoverbw.objects.filter(handoverb=item)
if handoverbws.count() != item.count:
raise ParseError("交接与明细数量不一致,操作失败")
wm = WMaterial.objects.get(id=wm.id)
for item in handoverbws:
wpr:Wpr = item.wpr
Wpr.change_or_new(wpr=wpr, wm=wm, old_wm=wpr.wm, old_mb=wpr.mb, add_version=False)
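Taken together, the service changes above settle on one locking discipline: lock the header row (Mlog or Handover) with select_for_update first, then each WMaterial involved, and change counts only through increase()/decrease()/locked_get_or_create(), all inside the surrounding transaction. A condensed sketch of that shape, not the full service; pending_items() is a hypothetical helper standing in for the real per-line iteration:

from django.db import transaction
from apps.wpm.models import Handover, WMaterial

def submit_outline(handover_id, user):
    """Illustrative outline of the concurrency-safe submit flow."""
    with transaction.atomic():
        handover = Handover.objects.select_for_update().get(pk=handover_id)
        if handover.submit_time is not None:
            return  # a concurrent request already submitted this handover
        for wm_id, xcount in pending_items(handover):  # hypothetical helper
            wm_from = WMaterial.objects.select_for_update().get(id=wm_id)
            WMaterial.decrease(wm_id=wm_from.id, user=user, count=xcount)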

View File

@ -20,9 +20,9 @@ router.register('sflogexp', SfLogExpViewSet, basename='sflogexp')
router.register('wmaterial', WMaterialViewSet, basename='wmaterial')
router.register('fmlog', FmlogViewSet, basename='fmlog')
router.register('mlog', MlogViewSet, basename='mlog')
router.register('mlogb', MlogbViewSet)
router.register('mlogb/in', MlogbInViewSet)
router.register('mlogb/out', MlogbOutViewSet)
router.register('mlogb', MlogbViewSet, basename='mlogb')
router.register('mlogb/in', MlogbInViewSet, basename='mlogb_in')
router.register('mlogb/out', MlogbOutViewSet, basename='mlogb_out')
router.register('handover', HandoverViewSet, basename='handover')
router.register('attlog', AttlogViewSet, basename='attlog')
router.register('otherlog', OtherLogViewSet, basename='otherlog')

View File

@ -1078,7 +1078,8 @@ class BatchStViewSet(CustomListModelMixin, ComplexQueryMixin, CustomGenericViewS
filterset_class = BatchStFilter
def add_info_for_list(self, data):
if self.request.query_params.get("with_source_near", None) == "yes":
if (self.request.query_params.get("with_source_near", None) == "yes" or
self.request.data.get("with_source_near", None) == "yes"):
batchstIds = [ins["id"] for ins in data]
batchlog_qs = BatchLog.objects.filter(target__id__in=batchstIds).values("id", "source", "target")
source_data = BatchStSerializer(instance=BatchSt.objects.filter(id__in=[ins["source"] for ins in batchlog_qs]), many=True).data

View File

@ -18,6 +18,7 @@ class WprFilter(filters.FilterSet):
"wm": ["exact", "isnull"],
"material__process": ["exact"],
"material__name": ["exact", "contains"],
"wpr_from": ["exact", "isnull"],
"state": ["exact"],
"defects": ["exact"],
"number": ["exact"]

View File

@ -1,6 +1,6 @@
from rest_framework.decorators import action
from apps.utils.viewsets import CustomModelViewSet, CustomGenericViewSet
from apps.utils.mixins import CustomListModelMixin, RetrieveModelMixin, ComplexQueryMixin
from apps.utils.mixins import CustomListModelMixin, CustomRetrieveModelMixin, ComplexQueryMixin
from apps.wpmw.models import Wpr, WprDefect
from apps.wpmw.serializers import WprSerializer, WprNewSerializer, WprDetailSerializer, WproutListSerializer, WprChangeNumberSerializer
@ -13,7 +13,7 @@ from apps.utils.sql import query_one_dict
from django.db.models.expressions import RawSQL
class WprViewSet(CustomListModelMixin, RetrieveModelMixin, ComplexQueryMixin, CustomGenericViewSet):
class WprViewSet(CustomListModelMixin, CustomRetrieveModelMixin, ComplexQueryMixin, CustomGenericViewSet):
"""动态产品
动态产品
@ -34,6 +34,16 @@ class WprViewSet(CustomListModelMixin, RetrieveModelMixin, ComplexQueryMixin, Cu
"number_suffix": RawSQL("COALESCE(NULLIF(regexp_replace(wpmw_wpr.number, '.*?(\\d+)$', '\\1'), ''), '0')::bigint", []),
}
def add_info_for_list(self, data):
parent_ids = [item["wpr_from"] for item in data if item.get("wpr_from", False)]
if parent_ids:
parent_data = Wpr.objects.filter(id__in=parent_ids).values("id", "number", "data")
parent_map = {item["id"]: item for item in parent_data}
for item in data:
if item["wpr_from"]:
item["wpr_from_"] = parent_map[item["wpr_from"]]
return data
def filter_queryset(self, queryset):
qs = super().filter_queryset(queryset)
if "mb__isnull" in self.request.query_params or "wm__isnull" in self.request.query_params:
@ -92,11 +102,20 @@ class WprViewSet(CustomListModelMixin, RetrieveModelMixin, ComplexQueryMixin, Cu
# 使用原始sql
query = """
SELECT id, number_out FROM wpmw_wpr
WHERE number_out ~ %s order by number_out desc limit 1
WHERE number_out LIKE %s
AND translate(
substring(number_out FROM LENGTH(%s) + 2),
'0123456789',
''
) = ''
order by number_out desc limit 1
"""
pattern = f"^{prefix}[0-9]+$"
params = (
f"{prefix}-%",
prefix
)
number_outs = []
wpr_qs_last = query_one_dict(query, [pattern])
wpr_qs_last = query_one_dict(query, params)
if wpr_qs_last:
number_outs.append(wpr_qs_last["number_out"])
# 查找未出库的记录
@ -106,9 +125,15 @@ class WprViewSet(CustomListModelMixin, RetrieveModelMixin, ComplexQueryMixin, Cu
query2 = """
select mioitemw.id, mioitemw.number_out from inm_mioitemw mioitemw left join inm_mioitem mioitem on mioitem.id = mioitemw.mioitem_id
left join inm_mio mio on mio.id = mioitem.mio_id
where mio.submit_time is null and mioitemw.number_out ~ %s order by mioitemw.number_out desc limit 1
where mio.submit_time is null and mioitemw.number_out LIKE %s
AND translate(
substring(mioitemw.number_out FROM LENGTH(%s) + 2),
'0123456789',
''
) = ''
order by mioitemw.number_out desc limit 1
"""
mioitemw_last = query_one_dict(query2, [pattern])
mioitemw_last = query_one_dict(query2, params)
if mioitemw_last:
number_outs.append(mioitemw_last["number_out"])
if number_outs:

View File

@ -1,37 +1,82 @@
celery==5.2.3
Django==3.2.12
django-celery-beat==2.3.0
django-celery-results==2.4.0
django-cors-headers==3.11.0
django-filter==21.1
djangorestframework==3.13.1
djangorestframework-simplejwt==5.1.0
drf-yasg==1.21.7
psutil==5.9.0
pillow==9.0.1
opencv-python==4.5.5.62
redis==4.4.0
django-redis==5.2.0
user-agents==2.2.0
daphne==4.0.0
channels-redis==4.0.0
# =======================
# Core
# =======================
Django==4.2.27
djangorestframework==3.16.1
django-filter==23.5
django-cors-headers==4.9.0
djangorestframework-simplejwt==5.5.1
django-restql==0.15.2
# =======================
# Celery
# =======================
celery==5.6.2
django-celery-beat==2.8.1
django-celery-results==2.6.0
redis==7.1.0
django-redis==6.0.0
cron-descriptor==1.2.35
# =======================
# Channels / ASGI
# =======================
channels==4.3.2
daphne==4.0.0
channels-redis==4.3.0
# =======================
# API Docs
# =======================
drf-yasg==1.21.7
# =======================
# Auth / Utils
# =======================
user-agents==2.2.0
psutil==5.9.0
# =======================
# Media / Image / CV
# =======================
pillow==9.5.0
opencv-python==4.5.5.62
shapely==1.8.3
aliyun-python-sdk-core==2.13.36
baidu-aip==4.16.6
chardet==5.0.0
requests==2.28.1
# =======================
# Network / RPC
# =======================
requests==2.32.5
grpcio==1.47.0
grpcio-tools==1.47.0
protobuf==3.20.1
pycryptodome==3.15.0
# =======================
# Cloud SDK
# =======================
aliyun-python-sdk-core==2.13.36
baidu-aip==4.16.6
# =======================
# Crypto
# =======================
pycryptodome==3.15.0
# =======================
# Excel / Docs
# =======================
xlwt==1.3.0
openpyxl==3.1.0
cron-descriptor==1.2.35
pymysql==1.0.3
# face-recognition==1.3.0
openpyxl==3.1.5
docxtpl==0.16.7
# =======================
# DB
# =======================
pymysql==1.0.3
# =======================
# IoT / MQTT
# =======================
paho-mqtt==2.0.0
# deepface==0.0.79
# edge-tts==6.1.12

View File

@ -8,13 +8,16 @@ https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
import django
from channels.routing import ProtocolTypeRouter, URLRouter
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'server.settings')
django.setup()
from django.core.asgi import get_asgi_application
from apps.utils.middlewares import TokenAuthMiddleware
import apps.ws.routing
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'server.settings')
application = ProtocolTypeRouter({
"http": get_asgi_application(),
"websocket": TokenAuthMiddleware(