Merge branch 'master' of http://gitea.xxhhcty.xyz:8080/zcdsj/factory
This commit is contained in:
commit
362a5ef725
|
@ -1,18 +1,19 @@
|
|||
from django.urls import path, include
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from apps.enm.views import (MpointViewSet, MpLogxViewSet, MpointStatViewSet,
|
||||
EnStatViewSet, EnStat2ViewSet, XscriptViewSet)
|
||||
from apps.enm.views import (MpointViewSet, MpointStatViewSet,
|
||||
EnStatViewSet, EnStat2ViewSet, XscriptViewSet, MpLogxAPIView)
|
||||
|
||||
API_BASE_URL = 'api/enm/'
|
||||
HTML_BASE_URL = 'dhtml/enm/'
|
||||
|
||||
router = DefaultRouter()
|
||||
router.register('mpoint', MpointViewSet, basename='mpoint')
|
||||
router.register('mplogx', MpLogxViewSet, basename='mplogx')
|
||||
# router.register('mplogx', MpLogxViewSet, basename='mplogx')
|
||||
router.register('mpointstat', MpointStatViewSet, basename='mpointstat')
|
||||
router.register('enstat', EnStatViewSet, basename='enstat')
|
||||
router.register('enstat2', EnStat2ViewSet, basename='enstat2')
|
||||
router.register('xscript', XscriptViewSet, basename='xscript')
|
||||
urlpatterns = [
|
||||
path(API_BASE_URL, include(router.urls)),
|
||||
path(f'{API_BASE_URL}mplogx/', MpLogxAPIView.as_view(), name='mplogx_list'),
|
||||
]
|
|
@ -12,6 +12,7 @@ from apps.enm.tasks import cal_mpointstat_manual
|
|||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import Serializer
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.views import APIView
|
||||
from apps.enm.tasks import cal_mpointstats_duration
|
||||
from apps.enm.services import king_sync, MpointCache
|
||||
from django.db import transaction
|
||||
|
@ -21,7 +22,13 @@ from apps.enm.services import get_analyse_data_mgroups_duration
|
|||
from django.db.models import Sum
|
||||
import logging
|
||||
from django.core.cache import cache
|
||||
from apps.utils.sql import query_one_dict, query_all_dict
|
||||
from drf_yasg import openapi
|
||||
from drf_yasg.utils import swagger_auto_schema
|
||||
from django.utils import timezone
|
||||
|
||||
myLogger = logging.getLogger('log')
|
||||
|
||||
class MpointViewSet(CustomModelViewSet):
|
||||
"""
|
||||
list:测点
|
||||
|
@ -166,6 +173,97 @@ class XscriptViewSet(CustomModelViewSet):
|
|||
# select_related_fields = ['mpoint']
|
||||
# filterset_fields = ['mpoint', 'mpoint__mgroup', 'mpoint__mgroup__belong_dept']
|
||||
|
||||
class MpLogxAPIView(APIView):
    """
    list:测点采集数据

    Read-only endpoint that queries the ``enm_mplogx`` table with raw SQL.
    ``page=0`` returns the full (unpaginated) rows, restricted to the
    whitelisted ``fields``; any other page returns ``SELECT *`` rows wrapped
    with count/page/page_size metadata.
    """
    perms_map = {"get": "*"}

    @swagger_auto_schema(manual_parameters=[
        openapi.Parameter('mpoint', openapi.IN_QUERY, description='测点ID', type=openapi.TYPE_STRING),
        openapi.Parameter('timex__gte', openapi.IN_QUERY, description='开始时间', type=openapi.TYPE_STRING),
        openapi.Parameter('timex__lte', openapi.IN_QUERY, description='结束时间', type=openapi.TYPE_STRING),
        openapi.Parameter('page', openapi.IN_QUERY, description='页码', type=openapi.TYPE_INTEGER),
        openapi.Parameter('page_size', openapi.IN_QUERY, description='每页数量', type=openapi.TYPE_INTEGER),
        openapi.Parameter('ordering', openapi.IN_QUERY, description='排序字段,如 -timex', type=openapi.TYPE_STRING),
        openapi.Parameter('fields', openapi.IN_QUERY, description='返回字段,如 timex,val_float,val_int', type=openapi.TYPE_STRING),
    ])
    def get(self, request, *args, **kwargs):
        """Query mplogx rows for one mpoint within a time window.

        Raises:
            ParseError: on missing or malformed query parameters.
        """
        mpoint = request.query_params.get("mpoint", None)
        timex__gte_str = request.query_params.get("timex__gte", None)
        timex__lte_str = request.query_params.get("timex__lte", None)
        if mpoint is None or timex__gte_str is None:
            raise ParseError("mpoint, timex__gte are required")

        # Non-numeric page/page_size used to bubble up as a 500; report a 400 instead.
        try:
            page = int(request.query_params.get("page", 1))
            page_size = int(request.query_params.get("page_size", 20))
        except ValueError:
            raise ParseError("page, page_size must be integers")
        # BUG FIX: was `page < 0 and page_size < 0`, which only rejected the
        # request when BOTH values were negative. A single negative value
        # (e.g. page=-1) slipped through into LIMIT/OFFSET arithmetic.
        if page < 0 or page_size < 0:
            raise ParseError("page, page_size must be positive")

        ordering = request.query_params.get("ordering", "-timex")  # 默认倒序 (newest first)
        fields = request.query_params.get("fields", None)

        # 处理时间: parse the window; timex__lte defaults to "now".
        try:
            timex__gte = timezone.make_aware(datetime.strptime(timex__gte_str, "%Y-%m-%d %H:%M:%S"))
            timex__lte = (timezone.make_aware(datetime.strptime(timex__lte_str, "%Y-%m-%d %H:%M:%S"))
                          if timex__lte_str else timezone.now())
        except ValueError:
            # Bad timestamp format is a client error, not a server crash.
            raise ParseError("timex__gte/timex__lte must be formatted as %Y-%m-%d %H:%M:%S")

        # 排序白名单: whitelist keeps the interpolated ORDER BY injection-safe.
        allowed_fields = {"timex", "val_mrs", "val_int", "val_float"}  # 根据表字段修改
        order_fields = []
        for field in ordering.split(","):
            field = field.strip()
            if not field:
                continue
            desc = field.startswith("-")
            field_name = field[1:] if desc else field
            if field_name in allowed_fields:
                order_fields.append(f"{field_name} {'DESC' if desc else 'ASC'}")
        # 如果没有合法字段,使用默认排序
        if not order_fields:
            order_fields = ["timex DESC"]
        order_clause = "ORDER BY " + ", ".join(order_fields)

        where_params = [mpoint, timex__gte, timex__lte]

        if page == 0:
            # Unpaginated mode: only whitelisted columns may be selected.
            if fields:
                # 过滤白名单,避免非法列
                selected = [f for f in fields.split(",") if f in allowed_fields]
            else:
                selected = []
            if not selected:
                selected = ["timex", "val_float", "val_int"]  # 默认列
            page_sql = f"""SELECT {", ".join(selected)} FROM enm_mplogx
                WHERE mpoint_id=%s AND timex >= %s AND timex <= %s
                {order_clause}"""
            # No COUNT query here: the unpaginated response carries no count,
            # so the extra round-trip the old code made was wasted work.
            return Response(query_all_dict(page_sql, where_params, with_time_format=True))

        # Paginated mode: full rows plus count metadata.
        count_sql = """SELECT COUNT(*) AS total_count FROM enm_mplogx
            WHERE mpoint_id=%s AND timex >= %s AND timex <= %s"""
        count_data = query_one_dict(count_sql, where_params, with_time_format=True)

        page_sql = f"""SELECT * FROM enm_mplogx
            WHERE mpoint_id=%s AND timex >= %s AND timex <= %s
            {order_clause} LIMIT %s OFFSET %s"""
        page_data = query_all_dict(page_sql, where_params + [page_size, (page - 1) * page_size],
                                   with_time_format=True)

        return Response({
            "count": count_data["total_count"],
            "page": page,
            "page_size": page_size,
            "results": page_data
        })
|
||||
|
||||
|
||||
|
||||
class MpLogxViewSet(CustomListModelMixin, CustomGenericViewSet):
|
||||
"""
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from django_filters import rest_framework as filters
|
||||
from apps.mtm.models import Goal, Material, Route
|
||||
from apps.mtm.models import Goal, Material, Route, RoutePack
|
||||
from django.db.models.expressions import F
|
||||
from rest_framework.exceptions import ParseError
|
||||
|
||||
|
||||
class MaterialFilter(filters.FilterSet):
|
||||
|
@ -45,6 +46,8 @@ class GoalFilter(filters.FilterSet):
|
|||
|
||||
|
||||
class RouteFilter(filters.FilterSet):
|
||||
nprocess_name = filters.CharFilter(method='filter_nprocess_name', label="nprocess_name")
|
||||
material_in_has = filters.CharFilter(method='filter_material_in_has', label="material_in_has ID")
|
||||
class Meta:
|
||||
model = Route
|
||||
fields = {
|
||||
|
@ -61,3 +64,15 @@ class RouteFilter(filters.FilterSet):
|
|||
"mgroup__belong_dept__name": ["exact", "contains"],
|
||||
"from_route": ["exact", "isnull"],
|
||||
}
|
||||
|
||||
def filter_nprocess_name(self, queryset, name, value):
    # Intentional no-op: `nprocess_name` is not applied on its own.
    # It is read back via self.data inside filter_material_in_has, which
    # performs the combined filtering; this method only exists so the
    # parameter is declared on the FilterSet.
    return queryset
|
||||
|
||||
|
||||
def filter_material_in_has(self, queryset, name, value):
    """Filter routes to those in a CONFIRMED route pack that takes the
    given material as input, narrowed to the process named by the
    companion ``nprocess_name`` query parameter.

    Raises:
        ParseError: if ``nprocess_name`` was not supplied alongside.
    """
    target_process = self.data.get('nprocess_name', None)
    # Guard clause: this filter is meaningless without the process name.
    if not target_process:
        raise ParseError("nprocess_name is required")
    pack_ids = queryset.filter(
        material_in__id=value,
        routepack__isnull=False,
        routepack__state=RoutePack.RP_S_CONFIRM,
    ).values_list('routepack', flat=True)
    return queryset.filter(routepack__in=pack_ids, process__name=target_process)
|
|
@ -58,7 +58,7 @@ def daoru_material(path: str):
|
|||
i = 3
|
||||
if sheet['a2'].value != '物料编号':
|
||||
raise ParseError('列错误导入失败')
|
||||
while sheet[f'b{i}'].value is not None:
|
||||
while sheet[f'b{i}'].value is not None or sheet[f'd{i}'].value is not None:
|
||||
type_str = sheet[f'b{i}'].value.replace(' ', '')
|
||||
try:
|
||||
type = type_dict[type_str]
|
||||
|
@ -171,8 +171,8 @@ def bind_routepack(ticket: Ticket, transition, new_ticket_data: dict):
|
|||
ticket.save()
|
||||
if routepack.ticket is None:
|
||||
routepack.ticket = ticket
|
||||
routepack.state = RoutePack.RP_S_AUDIT
|
||||
routepack.save()
|
||||
routepack.state = RoutePack.RP_S_AUDIT
|
||||
routepack.save()
|
||||
|
||||
|
||||
def routepack_audit_end(ticket: Ticket):
|
||||
|
@ -182,7 +182,7 @@ def routepack_audit_end(ticket: Ticket):
|
|||
|
||||
def routepack_ticket_change(ticket: Ticket):
|
||||
routepack = RoutePack.objects.get(id=ticket.ticket_data['t_id'])
|
||||
if ticket.act_state == Ticket.TICKET_ACT_STATE_DRAFT:
|
||||
if ticket.act_state in [Ticket.TICKET_ACT_STATE_DRAFT, Ticket.TICKET_ACT_STATE_BACK, Ticket.TICKET_ACT_STATE_RETREAT]:
|
||||
routepack.state = RoutePack.RP_S_CREATE
|
||||
routepack.save()
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ from rest_framework.exceptions import ParseError
|
|||
myLogger = logging.getLogger('log')
|
||||
|
||||
|
||||
@auto_log(name='阿里云短信', raise_exception=True, send_mail=True)
|
||||
@auto_log(name='阿里云短信', raise_exception=True, send_mail=False)
|
||||
def send_sms(phone: str, template_code: int, template_param: dict):
|
||||
from aliyunsdkcore.client import AcsClient
|
||||
from aliyunsdkcore.request import CommonRequest
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
from django.db import connection
|
||||
from django.utils import timezone
|
||||
from datetime import datetime
|
||||
|
||||
def execute_raw_sql(sql: str, params=None):
|
||||
"""执行原始sql并返回rows, columns数据
|
||||
|
@ -23,7 +25,7 @@ def format_sqldata(columns, rows):
|
|||
return [columns] + rows, [dict(zip(columns, row)) for row in rows]
|
||||
|
||||
|
||||
def query_all_dict(sql, params=None):
|
||||
def query_all_dict(sql, params=None, with_time_format=False):
|
||||
'''
|
||||
查询所有结果返回字典类型数据
|
||||
:param sql:
|
||||
|
@ -36,9 +38,19 @@ def query_all_dict(sql, params=None):
|
|||
else:
|
||||
cursor.execute(sql)
|
||||
columns = [desc[0] for desc in cursor.description]
|
||||
if with_time_format:
|
||||
results = []
|
||||
for row in cursor.fetchall():
|
||||
row_dict = {}
|
||||
for col, val in zip(columns, row):
|
||||
if isinstance(val, datetime):
|
||||
val = timezone.make_naive(val).strftime("%Y-%m-%d %H:%M:%S")
|
||||
row_dict[col] = val
|
||||
results.append(row_dict)
|
||||
return results
|
||||
return [dict(zip(columns, row)) for row in cursor.fetchall()]
|
||||
|
||||
def query_one_dict(sql, params=None):
|
||||
def query_one_dict(sql, params=None, with_time_format=False):
|
||||
"""
|
||||
查询一个结果返回字典类型数据
|
||||
:param sql:
|
||||
|
@ -49,6 +61,13 @@ def query_one_dict(sql, params=None):
|
|||
cursor.execute(sql, params or ()) # 更简洁的参数处理
|
||||
columns = [desc[0] for desc in cursor.description]
|
||||
row = cursor.fetchone()
|
||||
if with_time_format:
|
||||
row_dict = {}
|
||||
for col, val in zip(columns, row):
|
||||
if isinstance(val, datetime):
|
||||
val = timezone.make_naive(val).strftime("%Y-%m-%d %H:%M:%S")
|
||||
row_dict[col] = val
|
||||
return row_dict
|
||||
return dict(zip(columns, row)) if row else None # 安全处理None情况
|
||||
|
||||
import pymysql
|
||||
|
|
|
@ -442,11 +442,17 @@ class WfService(object):
|
|||
last_log.intervene_type == Transition.TRANSITION_INTERVENE_TYPE_DELIVER or
|
||||
ticket.in_add_node):
|
||||
# 如果状态变化或是转交加签的情况再发送通知
|
||||
Thread(target=send_ticket_notice_t, args=(ticket,), daemon=True).start()
|
||||
cls.send_ticket_notice(ticketflow=last_log)
|
||||
|
||||
# 如果目标状态是脚本则异步执行
|
||||
if state.participant_type == State.PARTICIPANT_TYPE_ROBOT:
|
||||
run_task.delay(ticket_id=ticket.id)
|
||||
|
||||
@classmethod
|
||||
def send_ticket_notice(cls, ticketflow:TicketFlow):
|
||||
# 根据ticketflow发送通知
|
||||
Thread(target=send_ticket_notice_t, args=(ticketflow,), daemon=True).start()
|
||||
|
||||
|
||||
@classmethod
|
||||
def close_by_task(cls, ticket: Ticket, suggestion: str):
|
||||
|
@ -479,10 +485,13 @@ class WfService(object):
|
|||
suggestion=suggestion, participant_type=State.PARTICIPANT_TYPE_PERSONAL,
|
||||
intervene_type=Transition.TRANSITION_INTERVENE_TYPE_RETREAT,
|
||||
participant=handler, transition=None)
|
||||
def send_ticket_notice_t(ticket: Ticket):
|
||||
cls.task_ticket(ticket=ticket)
|
||||
|
||||
def send_ticket_notice_t(ticketflow: TicketFlow):
|
||||
"""
|
||||
发送通知
|
||||
"""
|
||||
ticket = ticketflow.ticket
|
||||
params = {'workflow': ticket.workflow.name, 'state': ticket.state.name}
|
||||
if ticket.participant_type == 1:
|
||||
# 发送短信通知
|
||||
|
|
|
@ -330,11 +330,12 @@ class TicketViewSet(CreateUpdateCustomMixin, CreateModelMixin, ListModelMixin, R
|
|||
ticket.participant_type = State.PARTICIPANT_TYPE_PERSONAL
|
||||
ticket.participant = vdata['target_user']
|
||||
ticket.save()
|
||||
TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
tf = TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
ticket_data=WfService.get_ticket_all_field_value(ticket),
|
||||
suggestion=vdata.get('suggestion', ''), participant_type=State.PARTICIPANT_TYPE_PERSONAL,
|
||||
intervene_type=Transition.TRANSITION_INTERVENE_TYPE_DELIVER,
|
||||
participant=request.user, transition=None)
|
||||
WfService.send_ticket_notice(ticketflow=tf)
|
||||
return Response()
|
||||
|
||||
@action(methods=['get'], detail=True, perms_map={'get': '*'})
|
||||
|
@ -382,11 +383,12 @@ class TicketViewSet(CreateUpdateCustomMixin, CreateModelMixin, ListModelMixin, R
|
|||
ticket.save()
|
||||
# 接单日志
|
||||
# 更新工单流转记录
|
||||
TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
tf = TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
ticket_data=WfService.get_ticket_all_field_value(ticket),
|
||||
suggestion='', participant_type=State.PARTICIPANT_TYPE_PERSONAL,
|
||||
intervene_type=Transition.TRANSITION_ATTRIBUTE_TYPE_ACCEPT,
|
||||
participant=request.user, transition=None)
|
||||
WfService.send_ticket_notice(ticketflow=tf)
|
||||
return Response()
|
||||
else:
|
||||
raise ParseError('无需接单')
|
||||
|
@ -421,11 +423,12 @@ class TicketViewSet(CreateUpdateCustomMixin, CreateModelMixin, ListModelMixin, R
|
|||
ticket.save()
|
||||
# 更新流转记录
|
||||
suggestion = request.data.get('suggestion', '') # 加签说明
|
||||
TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
tf = TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
ticket_data=WfService.get_ticket_all_field_value(ticket),
|
||||
suggestion=suggestion, participant_type=State.PARTICIPANT_TYPE_PERSONAL,
|
||||
intervene_type=Transition.TRANSITION_INTERVENE_TYPE_ADD_NODE,
|
||||
participant=request.user, transition=None)
|
||||
WfService.send_ticket_notice(ticketflow=tf)
|
||||
return Response()
|
||||
|
||||
@action(methods=['post'], detail=True, perms_map={'post': '*'}, serializer_class=TicketAddNodeEndSerializer)
|
||||
|
@ -445,11 +448,12 @@ class TicketViewSet(CreateUpdateCustomMixin, CreateModelMixin, ListModelMixin, R
|
|||
ticket.save()
|
||||
# 更新流转记录
|
||||
suggestion = request.data.get('suggestion', '') # 加签意见
|
||||
TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
tf = TicketFlow.objects.create(ticket=ticket, state=ticket.state,
|
||||
ticket_data=WfService.get_ticket_all_field_value(ticket),
|
||||
suggestion=suggestion, participant_type=State.PARTICIPANT_TYPE_PERSONAL,
|
||||
intervene_type=Transition.TRANSITION_INTERVENE_TYPE_ADD_NODE_END,
|
||||
participant=request.user, transition=None)
|
||||
WfService.send_ticket_notice(ticketflow=tf)
|
||||
return Response()
|
||||
|
||||
@action(methods=['post'], detail=True, perms_map={'post': '*'},
|
||||
|
|
Loading…
Reference in New Issue