Compare commits
41 Commits
43bbc36106
...
6102ed712f
| Author | SHA1 | Date |
|---|---|---|
|
|
6102ed712f | |
|
|
f8757a757e | |
|
|
268c5f76af | |
|
|
01d8418e5c | |
|
|
6b9e396044 | |
|
|
3a9b52864b | |
|
|
3e026e1b99 | |
|
|
2db9fec081 | |
|
|
521ccfb800 | |
|
|
bc8b5a24ab | |
|
|
63cf3d8c67 | |
|
|
cea7726106 | |
|
|
b651b6af66 | |
|
|
4761e88c81 | |
|
|
9955498e24 | |
|
|
c961fc0298 | |
|
|
7ff40c1aa3 | |
|
|
837e648b7d | |
|
|
a3bc280c0f | |
|
|
797e96cbb5 | |
|
|
f9f9915012 | |
|
|
9bbbb82459 | |
|
|
8cea26cb07 | |
|
|
56e404555b | |
|
|
c3037e52cf | |
|
|
60452f9065 | |
|
|
3cc13ccf1e | |
|
|
c562bcc10b | |
|
|
9a3d1f5ebb | |
|
|
de1879bbf2 | |
|
|
7d83cf27dd | |
|
|
3d18a65c7d | |
|
|
6b3c52e45e | |
|
|
a1e2536844 | |
|
|
b34c898089 | |
|
|
cf26a1f319 | |
|
|
1fdfc4e5fc | |
|
|
fb6a34a2d9 | |
|
|
c472360061 | |
|
|
87450af171 | |
|
|
e2a2d4a55e |
|
|
@ -31,3 +31,4 @@ Thumbs.db
|
|||
.VSCodeCounter/
|
||||
cl.bat
|
||||
col.bat
|
||||
.worktrees/
|
||||
|
|
|
|||
|
|
@ -105,6 +105,45 @@ version = "1.0.102"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
|
||||
|
||||
[[package]]
|
||||
name = "app_feeder_distributor"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-opcua",
|
||||
"async-stream",
|
||||
"axum",
|
||||
"chrono",
|
||||
"dotenv",
|
||||
"plc_platform_core",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"sqlx",
|
||||
"tokio",
|
||||
"tower",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
"tray-icon",
|
||||
"uuid",
|
||||
"validator",
|
||||
"webbrowser",
|
||||
"winit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "app_operation_system"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"axum",
|
||||
"dotenv",
|
||||
"plc_platform_core",
|
||||
"tokio",
|
||||
"tower",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.8.2"
|
||||
|
|
@ -1265,34 +1304,6 @@ dependencies = [
|
|||
"slab",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gateway_rs"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-opcua",
|
||||
"async-stream",
|
||||
"axum",
|
||||
"chrono",
|
||||
"dotenv",
|
||||
"fs2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"sqlx",
|
||||
"time",
|
||||
"tokio",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
"tracing-appender",
|
||||
"tracing-subscriber",
|
||||
"tray-icon",
|
||||
"uuid",
|
||||
"validator",
|
||||
"webbrowser",
|
||||
"winit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gdk"
|
||||
version = "0.18.2"
|
||||
|
|
@ -2715,6 +2726,31 @@ version = "0.2.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
|
||||
|
||||
[[package]]
|
||||
name = "plc_platform_core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-opcua",
|
||||
"async-stream",
|
||||
"axum",
|
||||
"chrono",
|
||||
"dotenv",
|
||||
"fs2",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"sqlx",
|
||||
"time",
|
||||
"tokio",
|
||||
"tower-http",
|
||||
"tracing",
|
||||
"tracing-appender",
|
||||
"tracing-subscriber",
|
||||
"uuid",
|
||||
"validator",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "png"
|
||||
version = "0.17.16"
|
||||
|
|
|
|||
58
Cargo.toml
58
Cargo.toml
|
|
@ -1,51 +1,7 @@
|
|||
[package]
|
||||
name = "gateway_rs"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
# Async runtime
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
|
||||
# Web framework
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
|
||||
# Database
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "chrono", "uuid", "json"] }
|
||||
|
||||
# Serialization
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_with = "3.0"
|
||||
async-stream = "0.3"
|
||||
|
||||
# Time handling
|
||||
chrono = "0.4"
|
||||
time = "0.3"
|
||||
|
||||
# UUID
|
||||
uuid = { version = "1.21", features = ["serde", "v4"] }
|
||||
|
||||
# OPC UA
|
||||
async-opcua = { version = "0.18", features = ["client"] }
|
||||
|
||||
# Logging
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter", "time", "json"] }
|
||||
tracing-appender = "0.2"
|
||||
|
||||
# Environment variables
|
||||
dotenv = "0.15"
|
||||
|
||||
# Validation
|
||||
validator = { version = "0.20", features = ["derive"] }
|
||||
|
||||
# Error handling
|
||||
anyhow = "1.0"
|
||||
fs2 = "0.4"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
tray-icon = "0.15"
|
||||
winit = "0.30"
|
||||
webbrowser = "0.8"
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/plc_platform_core",
|
||||
"crates/app_feeder_distributor",
|
||||
"crates/app_operation_system",
|
||||
]
|
||||
resolver = "2"
|
||||
|
|
|
|||
91
README.md
91
README.md
|
|
@ -47,25 +47,59 @@ PLC Control 是一个面向 PLC / OPC UA 场景的本地控制与监控系统,
|
|||
- SSE 推送日志增量
|
||||
- 日志流默认跟随最新 `app.log*` 文件,支持轮转切换
|
||||
|
||||
## 系统设计
|
||||
## 项目结构
|
||||
|
||||
## 后端结构
|
||||
```text
|
||||
plc_control/
|
||||
Cargo.toml # Workspace root
|
||||
crates/
|
||||
plc_platform_core/ # 共享平台核心库(配置、数据库、连接、事件、WebSocket 等)
|
||||
app_feeder_distributor/ # 投煤器布料机专用版
|
||||
app_operation_system/ # 运转系统专用版
|
||||
web/
|
||||
core/ # 共享 HTML/CSS(数据源、点位、设备、图表、日志等)
|
||||
feeder/ # 投煤器布料机页面 + JS
|
||||
ops/ # 运转系统页面 + JS
|
||||
```
|
||||
|
||||
- `src/main.rs`
|
||||
- 启动 Axum 服务
|
||||
- 注册 HTTP 路由、WebSocket 路由和静态页面
|
||||
- `src/handler`
|
||||
- HTTP 接口层
|
||||
- `src/service`
|
||||
- 数据查询与写入封装
|
||||
- `src/control`
|
||||
- 自动控制引擎、运行时存储、手动控制校验、模拟反馈
|
||||
- `src/connection.rs`
|
||||
- OPC UA 连接管理、订阅、轮询、批量写点
|
||||
- `src/event.rs`
|
||||
- 控制事件、实时点位事件和事件持久化
|
||||
- `src/websocket.rs`
|
||||
- WebSocket 房间与实时消息广播
|
||||
### 共享平台核心库 (`plc_platform_core`)
|
||||
|
||||
- `model` — 数据模型
|
||||
- `db` — 数据库初始化
|
||||
- `connection` — OPC UA 连接管理、订阅、轮询、批量写点
|
||||
- `service` — 数据查询与写入封装
|
||||
- `control/command` — 通用控制命令
|
||||
- `control/runtime` — 运行时状态存储
|
||||
- `event` — 事件信封与命名空间
|
||||
- `websocket` — WebSocket 房间与消息广播
|
||||
- `telemetry` — 点位遥测
|
||||
|
||||
### 业务应用
|
||||
|
||||
- `app_feeder_distributor` — 投煤器/布料机控制引擎、业务 handler、业务事件
|
||||
- `app_operation_system` — 运转系统控制逻辑(开发中)
|
||||
|
||||
## 构建
|
||||
|
||||
```powershell
|
||||
# 投煤器布料机
|
||||
cargo build -p app_feeder_distributor --release
|
||||
|
||||
# 运转系统
|
||||
cargo build -p app_operation_system --release
|
||||
```
|
||||
|
||||
## 部署
|
||||
|
||||
将编译产物和 `web/` 目录放在同一级目录下:
|
||||
|
||||
```text
|
||||
deploy/
|
||||
app_feeder_distributor.exe
|
||||
web/
|
||||
core/
|
||||
feeder/
|
||||
```
|
||||
|
||||
## 控制引擎设计
|
||||
|
||||
|
|
@ -86,22 +120,13 @@ PLC Control 是一个面向 PLC / OPC UA 场景的本地控制与监控系统,
|
|||
|
||||
## 前端 Web 设计
|
||||
|
||||
前端采用原生 ES Module 和分片 HTML 结构。
|
||||
前端采用原生 ES Module 和分片 HTML 结构,按应用拆分目录:
|
||||
|
||||
关键模块:
|
||||
- `web/core/` — 共享 HTML 面板(数据源、点位、设备、图表、日志、文档抽屉)和样式
|
||||
- `web/feeder/` — 投煤器专用入口、运维面板、控制单元表单、全部 JS 模块
|
||||
- `web/ops/` — 运转系统专用入口(开发中)
|
||||
|
||||
- `web/js/app.js`: 页面启动、视图切换、事件绑定
|
||||
- `web/js/ops.js`: 运维视图渲染
|
||||
- `web/js/logs.js`: WebSocket 与日志 SSE 处理
|
||||
- `web/js/docs.js`: Markdown 文档抽屉
|
||||
- `web/js/units.js` / `equipment.js` / `points.js`: 配置视图业务逻辑
|
||||
|
||||
文档查看入口:
|
||||
|
||||
- 可在前端页面中查看 `API.md`
|
||||
- 可在前端页面中查看 `README.md`
|
||||
|
||||
两者都通过统一的文档抽屉组件展示。
|
||||
每个应用的 Axum 路由使用 `ServeDir` 回退链:先查应用目录,再查 core 目录,URL 路径无需变化。
|
||||
|
||||
## 实时日志设计
|
||||
|
||||
|
|
@ -139,7 +164,9 @@ PLC Control 是一个面向 PLC / OPC UA 场景的本地控制与监控系统,
|
|||
|
||||
## 文档索引
|
||||
|
||||
- API 接口说明: `API.md`
|
||||
- 投煤器布料机 API: `docs/api-feeder.md`
|
||||
- 运转系统 API: `docs/api-ops.md`
|
||||
- 双应用共享核心设计: `docs/superpowers/specs/2026-04-14-dual-app-shared-core-design.md`
|
||||
- 控制引擎计划: `docs/superpowers/plans/2026-03-24-control-engine.md`
|
||||
- 双视图前端计划: `docs/superpowers/plans/2026-03-25-dual-view-web.md`
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,34 @@
|
|||
[package]
|
||||
name = "app_feeder_distributor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
plc_platform_core = { path = "../plc_platform_core" }
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
tracing = "0.1"
|
||||
dotenv = "0.15"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_with = "3.0"
|
||||
chrono = "0.4"
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "chrono", "uuid", "json"] }
|
||||
uuid = { version = "1.21", features = ["serde", "v4"] }
|
||||
async-stream = "0.3"
|
||||
async-opcua = { version = "0.18", features = ["client"] }
|
||||
validator = { version = "0.20", features = ["derive"] }
|
||||
anyhow = "1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
tower = { version = "0.5", features = ["util"] }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
tray-icon = "0.15"
|
||||
winit = "0.30"
|
||||
webbrowser = "0.8"
|
||||
|
||||
[[bin]]
|
||||
name = "app_feeder_distributor"
|
||||
path = "src/main.rs"
|
||||
|
|
@ -0,0 +1,252 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use crate::{
|
||||
config::AppConfig,
|
||||
connection::ConnectionManager,
|
||||
control,
|
||||
db::init_database,
|
||||
event::EventManager,
|
||||
router::build_router,
|
||||
websocket,
|
||||
};
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppState {
|
||||
pub config: AppConfig,
|
||||
pub pool: sqlx::PgPool,
|
||||
pub connection_manager: Arc<ConnectionManager>,
|
||||
pub event_manager: Arc<EventManager>,
|
||||
pub ws_manager: Arc<websocket::WebSocketManager>,
|
||||
pub control_runtime: Arc<control::runtime::ControlRuntimeStore>,
|
||||
}
|
||||
|
||||
pub async fn run() {
|
||||
dotenv::dotenv().ok();
|
||||
plc_platform_core::util::log::init_logger();
|
||||
let _platform = plc_platform_core::bootstrap::bootstrap_platform();
|
||||
let _single_instance =
|
||||
match plc_platform_core::util::single_instance::try_acquire("PLCControl.FeederDistributor") {
|
||||
Ok(guard) => guard,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
tracing::warn!("Another feeder distributor instance is already running");
|
||||
return;
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to initialize single instance guard: {}", err);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let config = AppConfig::from_env().expect("Failed to load configuration");
|
||||
let pool = init_database(&config.database_url)
|
||||
.await
|
||||
.expect("Failed to initialize database");
|
||||
|
||||
let mut connection_manager = ConnectionManager::new();
|
||||
let ws_manager = Arc::new(websocket::WebSocketManager::new());
|
||||
let event_manager = Arc::new(EventManager::new(
|
||||
pool.clone(),
|
||||
Arc::new(connection_manager.clone()),
|
||||
Some(ws_manager.clone()),
|
||||
));
|
||||
connection_manager.set_event_manager(event_manager.clone());
|
||||
connection_manager.set_pool_and_start_reconnect_task(Arc::new(pool.clone()));
|
||||
|
||||
let connection_manager = Arc::new(connection_manager);
|
||||
let control_runtime = Arc::new(control::runtime::ControlRuntimeStore::new());
|
||||
|
||||
let sources = crate::service::get_all_enabled_sources(&pool)
|
||||
.await
|
||||
.expect("Failed to fetch sources");
|
||||
|
||||
let mut tasks = Vec::new();
|
||||
for source in sources {
|
||||
let cm = connection_manager.clone();
|
||||
let p = pool.clone();
|
||||
let source_name = source.name.clone();
|
||||
let source_id = source.id;
|
||||
|
||||
let task = tokio::spawn(async move {
|
||||
if let Err(err) = cm.connect_from_source(&p, source_id).await {
|
||||
tracing::error!("Failed to connect to source {}: {}", source_name, err);
|
||||
}
|
||||
});
|
||||
|
||||
tasks.push(task);
|
||||
}
|
||||
|
||||
for task in tasks {
|
||||
if let Err(err) = task.await {
|
||||
tracing::error!("Source connection task failed: {:?}", err);
|
||||
}
|
||||
}
|
||||
|
||||
let state = AppState {
|
||||
config: config.clone(),
|
||||
pool,
|
||||
connection_manager: connection_manager.clone(),
|
||||
event_manager,
|
||||
ws_manager,
|
||||
control_runtime: control_runtime.clone(),
|
||||
};
|
||||
control::engine::start(state.clone(), control_runtime);
|
||||
if config.simulate_plc {
|
||||
control::simulate::start(state.clone());
|
||||
}
|
||||
|
||||
let app = build_router(state.clone());
|
||||
let addr = format!("{}:{}", config.server_host, config.server_port);
|
||||
tracing::info!("Starting feeder distributor server at http://{}", addr);
|
||||
let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
|
||||
|
||||
let ui_url = format!("http://{}:{}/ui", "localhost", config.server_port);
|
||||
let (shutdown_tx, mut shutdown_rx) = mpsc::channel::<()>(1);
|
||||
let shutdown_tx_ctrl = shutdown_tx.clone();
|
||||
let rt_handle = tokio::runtime::Handle::current();
|
||||
init_tray(ui_url, shutdown_tx.clone(), rt_handle);
|
||||
|
||||
let connection_manager_for_shutdown = connection_manager.clone();
|
||||
tokio::spawn(async move {
|
||||
tokio::signal::ctrl_c()
|
||||
.await
|
||||
.expect("Failed to install Ctrl+C handler");
|
||||
let _ = shutdown_tx_ctrl.send(()).await;
|
||||
});
|
||||
|
||||
let shutdown_signal = async move {
|
||||
let _ = shutdown_rx.recv().await;
|
||||
tracing::info!("Received shutdown signal, closing all feeder connections...");
|
||||
connection_manager_for_shutdown.disconnect_all().await;
|
||||
tracing::info!("All feeder connections closed");
|
||||
};
|
||||
|
||||
axum::serve(listener, app)
|
||||
.with_graceful_shutdown(shutdown_signal)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
pub fn test_state() -> AppState {
|
||||
let database_url = "postgres://plc:plc@localhost/plc_control_test".to_string();
|
||||
let pool = sqlx::postgres::PgPoolOptions::new()
|
||||
.connect_lazy(&database_url)
|
||||
.expect("lazy pool should build");
|
||||
let connection_manager = Arc::new(ConnectionManager::new());
|
||||
let ws_manager = Arc::new(websocket::WebSocketManager::new());
|
||||
let event_manager = Arc::new(EventManager::new(
|
||||
pool.clone(),
|
||||
connection_manager.clone(),
|
||||
Some(ws_manager.clone()),
|
||||
));
|
||||
|
||||
AppState {
|
||||
config: AppConfig {
|
||||
database_url,
|
||||
server_host: "127.0.0.1".to_string(),
|
||||
server_port: 0,
|
||||
write_api_key: Some("test-write-key".to_string()),
|
||||
simulate_plc: false,
|
||||
},
|
||||
pool,
|
||||
connection_manager,
|
||||
event_manager,
|
||||
ws_manager,
|
||||
control_runtime: Arc::new(control::runtime::ControlRuntimeStore::new()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn init_tray(ui_url: String, shutdown_tx: mpsc::Sender<()>, rt_handle: tokio::runtime::Handle) {
|
||||
std::thread::spawn(move || {
|
||||
if let Err(err) = tray::run_tray(ui_url, shutdown_tx, rt_handle) {
|
||||
tracing::warn!("Tray init failed: {}", err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn init_tray(_ui_url: String, _shutdown_tx: mpsc::Sender<()>, _rt_handle: tokio::runtime::Handle) {}
|
||||
|
||||
#[cfg(windows)]
|
||||
mod tray {
|
||||
use std::error::Error;
|
||||
|
||||
use tokio::sync::mpsc;
|
||||
use tray_icon::{
|
||||
menu::{Menu, MenuEvent, MenuItem},
|
||||
Icon, TrayIconBuilder,
|
||||
};
|
||||
use winit::application::ApplicationHandler;
|
||||
use winit::event_loop::{ActiveEventLoop, ControlFlow, EventLoop};
|
||||
use winit::platform::windows::EventLoopBuilderExtWindows;
|
||||
|
||||
pub fn run_tray(
|
||||
ui_url: String,
|
||||
shutdown_tx: mpsc::Sender<()>,
|
||||
rt_handle: tokio::runtime::Handle,
|
||||
) -> Result<(), Box<dyn Error>> {
|
||||
let mut builder = EventLoop::builder();
|
||||
builder.with_any_thread(true);
|
||||
let event_loop = builder.build()?;
|
||||
|
||||
let menu = Menu::new();
|
||||
let open_item = MenuItem::new("Open UI", true, None);
|
||||
let exit_item = MenuItem::new("Exit", true, None);
|
||||
menu.append(&open_item)?;
|
||||
menu.append(&exit_item)?;
|
||||
|
||||
let icon = Icon::from_rgba(vec![0, 120, 212, 255], 1, 1)?;
|
||||
let _tray = TrayIconBuilder::new()
|
||||
.with_tooltip("PLC Feeder Distributor")
|
||||
.with_menu(Box::new(menu))
|
||||
.with_icon(icon)
|
||||
.build()?;
|
||||
|
||||
let menu_rx = MenuEvent::receiver();
|
||||
let mut app = TrayApp {
|
||||
menu_rx,
|
||||
open_id: open_item.id().clone(),
|
||||
exit_id: exit_item.id().clone(),
|
||||
ui_url,
|
||||
shutdown_tx,
|
||||
rt_handle,
|
||||
};
|
||||
|
||||
event_loop.run_app(&mut app).map_err(|err| err.into())
|
||||
}
|
||||
|
||||
struct TrayApp {
|
||||
menu_rx: &'static tray_icon::menu::MenuEventReceiver,
|
||||
open_id: tray_icon::menu::MenuId,
|
||||
exit_id: tray_icon::menu::MenuId,
|
||||
ui_url: String,
|
||||
shutdown_tx: mpsc::Sender<()>,
|
||||
rt_handle: tokio::runtime::Handle,
|
||||
}
|
||||
|
||||
impl ApplicationHandler for TrayApp {
|
||||
fn resumed(&mut self, _event_loop: &ActiveEventLoop) {}
|
||||
|
||||
fn window_event(
|
||||
&mut self,
|
||||
_event_loop: &ActiveEventLoop,
|
||||
_window_id: winit::window::WindowId,
|
||||
_event: winit::event::WindowEvent,
|
||||
) {
|
||||
}
|
||||
|
||||
fn about_to_wait(&mut self, event_loop: &ActiveEventLoop) {
|
||||
event_loop.set_control_flow(ControlFlow::Wait);
|
||||
while let Ok(menu_event) = self.menu_rx.try_recv() {
|
||||
if menu_event.id == self.open_id {
|
||||
let _ = webbrowser::open(&self.ui_url);
|
||||
}
|
||||
if menu_event.id == self.exit_id {
|
||||
let _ = self.rt_handle.block_on(self.shutdown_tx.send(()));
|
||||
event_loop.exit();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use std::env;
|
||||
use std::env;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AppConfig {
|
||||
|
|
@ -52,12 +52,12 @@ async fn supervise(state: AppState, store: Arc<ControlRuntimeStore>) {
|
|||
}
|
||||
}
|
||||
|
||||
// ── Per-unit task ─────────────────────────────────────────────────────────────
|
||||
// Per-unit task.
|
||||
|
||||
async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uuid) {
|
||||
let notify = store.get_or_create_notify(unit_id).await;
|
||||
|
||||
// Fault/comm check ticker — still need periodic polling of point monitor data.
|
||||
// Fault/comm check ticker; still need periodic polling of point monitor data.
|
||||
let mut fault_tick = tokio::time::interval(Duration::from_millis(500));
|
||||
fault_tick.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
|
|
@ -76,7 +76,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
}
|
||||
};
|
||||
|
||||
// ── Fault / comm check ────────────────────────────────────────────────
|
||||
// Fault / comm check.
|
||||
let (kind_roles, kind_eq_ids, all_roles) = match load_equipment_maps(&state, unit_id).await {
|
||||
Ok(maps) => maps,
|
||||
Err(e) => {
|
||||
|
|
@ -92,7 +92,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
push_ws(&state, &runtime).await;
|
||||
}
|
||||
|
||||
// ── Wait when not active ──────────────────────────────────────────────
|
||||
// Wait when not active.
|
||||
if !runtime.auto_enabled || runtime.fault_locked || runtime.comm_locked || runtime.manual_ack_required {
|
||||
tokio::select! {
|
||||
_ = fault_tick.tick() => {}
|
||||
|
|
@ -106,7 +106,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
continue;
|
||||
}
|
||||
|
||||
// ── State machine step ────────────────────────────────────────────────
|
||||
// State machine step.
|
||||
match runtime.state {
|
||||
UnitRuntimeState::Stopped => {
|
||||
// Wait stop_time_sec (0 = skip wait, start immediately).
|
||||
|
|
@ -138,7 +138,10 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
// Wait run_time_sec. run_time_sec == 0 means run without a time limit
|
||||
// (relies on acc_time_sec to eventually stop). Treat as a very long phase.
|
||||
let secs = if unit.run_time_sec > 0 { unit.run_time_sec } else { i32::MAX };
|
||||
let unit_for_wait = crate::model::ControlUnit { run_time_sec: secs, ..unit.clone() };
|
||||
let unit_for_wait = plc_platform_core::model::ControlUnit {
|
||||
run_time_sec: secs,
|
||||
..unit.clone()
|
||||
};
|
||||
if !wait_phase(&state, &store, &unit_for_wait, &all_roles, ¬ify, &mut fault_tick).await {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -162,7 +165,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
runtime.display_acc_sec = runtime.accumulated_run_sec;
|
||||
|
||||
if unit.acc_time_sec > 0 && runtime.accumulated_run_sec >= unit.acc_time_sec as i64 * 1000 {
|
||||
// Accumulated threshold reached — start distributor.
|
||||
// Accumulated threshold reached; start distributor.
|
||||
let monitor = state.connection_manager.get_point_monitor_data_read_guard().await;
|
||||
let dist_cmd = kind_roles.get("distributor").and_then(|r| find_cmd(r, "start_cmd", &monitor));
|
||||
drop(monitor);
|
||||
|
|
@ -220,7 +223,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
}
|
||||
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
// Helpers.
|
||||
|
||||
/// Sleep for the duration appropriate to the *current* state, interrupting every
|
||||
/// 500 ms to re-check fault/comm. Returns `true` when the full time elapsed,
|
||||
|
|
@ -228,7 +231,7 @@ async fn unit_task(state: AppState, store: Arc<ControlRuntimeStore>, unit_id: Uu
|
|||
async fn wait_phase(
|
||||
state: &AppState,
|
||||
store: &ControlRuntimeStore,
|
||||
unit: &crate::model::ControlUnit,
|
||||
unit: &plc_platform_core::model::ControlUnit,
|
||||
all_roles: &[(Uuid, HashMap<String, EquipmentRolePoint>)],
|
||||
notify: &Arc<Notify>,
|
||||
fault_tick: &mut tokio::time::Interval,
|
||||
|
|
@ -279,7 +282,7 @@ async fn push_ws(state: &AppState, runtime: &UnitRuntime) {
|
|||
async fn check_fault_comm(
|
||||
state: &AppState,
|
||||
runtime: &mut UnitRuntime,
|
||||
unit: &crate::model::ControlUnit,
|
||||
unit: &plc_platform_core::model::ControlUnit,
|
||||
all_roles: &[(Uuid, HashMap<String, EquipmentRolePoint>)],
|
||||
) -> bool {
|
||||
let monitor = state
|
||||
|
|
@ -431,7 +434,7 @@ async fn load_equipment_maps(state: &AppState, unit_id: Uuid) -> Result<EquipMap
|
|||
|
||||
fn build_equipment_maps(
|
||||
unit_id: Uuid,
|
||||
equipment_list: &[crate::model::Equipment],
|
||||
equipment_list: &[plc_platform_core::model::Equipment],
|
||||
mut role_points_by_equipment: HashMap<Uuid, Vec<EquipmentRolePoint>>,
|
||||
) -> EquipMaps {
|
||||
let mut kind_roles: HashMap<String, HashMap<String, EquipmentRolePoint>> = HashMap::new();
|
||||
|
|
@ -496,7 +499,7 @@ fn find_cmd(
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::build_equipment_maps;
|
||||
use crate::model::Equipment;
|
||||
use plc_platform_core::model::Equipment;
|
||||
use crate::service::EquipmentRolePoint;
|
||||
use chrono::Utc;
|
||||
use std::collections::HashMap;
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
pub use plc_platform_core::control::{command, runtime};
|
||||
|
||||
pub mod engine;
|
||||
pub mod simulate;
|
||||
pub mod validator;
|
||||
|
||||
use crate::telemetry::{DataValue, PointMonitorInfo};
|
||||
|
||||
pub(crate) fn monitor_value_as_bool(monitor: &PointMonitorInfo) -> bool {
|
||||
match monitor.value.as_ref() {
|
||||
Some(DataValue::Bool(value)) => *value,
|
||||
Some(DataValue::Int(value)) => *value != 0,
|
||||
Some(DataValue::UInt(value)) => *value != 0,
|
||||
Some(DataValue::Float(value)) => *value != 0.0,
|
||||
Some(DataValue::Text(value)) => {
|
||||
matches!(value.trim().to_ascii_lowercase().as_str(), "1" | "true" | "on" | "yes")
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
|
@ -20,7 +20,7 @@ async fn run(state: AppState) {
|
|||
let mut rng = seed_rng();
|
||||
|
||||
loop {
|
||||
// Wait a random 15–60 s between events.
|
||||
// Wait a random 15-60 s between events.
|
||||
let wait_secs = 15 + xorshift(&mut rng) % 46;
|
||||
tokio::time::sleep(Duration::from_secs(wait_secs)).await;
|
||||
|
||||
|
|
@ -31,7 +31,7 @@ async fn run(state: AppState) {
|
|||
};
|
||||
let unit = &units[xorshift(&mut rng) as usize % units.len()];
|
||||
|
||||
// Only target units with auto control running — otherwise the event is uninteresting.
|
||||
// Only target units with auto control running; otherwise the event is uninteresting.
|
||||
let runtime = state.control_runtime.get(unit.id).await;
|
||||
if runtime.map_or(true, |r| !r.auto_enabled) {
|
||||
continue;
|
||||
|
|
@ -68,11 +68,11 @@ async fn run(state: AppState) {
|
|||
.find(|p| p.signal_role == target_role)
|
||||
.unwrap();
|
||||
|
||||
// rem=false → not in remote mode (blocks commands)
|
||||
// flt=true → fault signal active (triggers fault lock)
|
||||
// rem=false means the equipment is not in remote mode.
|
||||
// flt=true means the equipment reports an active fault.
|
||||
let trigger_value = target_role == "flt";
|
||||
|
||||
// Hold duration: 5–15 s for rem, 3–10 s for flt.
|
||||
// Hold duration: 5-15 s for rem, 3-10 s for flt.
|
||||
let hold_secs = if target_role == "flt" {
|
||||
3 + xorshift(&mut rng) % 8
|
||||
} else {
|
||||
|
|
@ -80,20 +80,20 @@ async fn run(state: AppState) {
|
|||
};
|
||||
|
||||
tracing::info!(
|
||||
"[chaos] unit={} eq={} role={} → {} (hold {}s)",
|
||||
"[chaos] unit={} eq={} role={} -> {} (hold {}s)",
|
||||
unit.code,
|
||||
eq.code,
|
||||
target_role,
|
||||
if trigger_value { "FAULT" } else { "REM OFF" },
|
||||
hold_secs
|
||||
);
|
||||
|
||||
patch_signal(&state, target_point.point_id, trigger_value).await;
|
||||
patch_signal(&state, target_point.point_id, trigger_value).await;
|
||||
tokio::time::sleep(Duration::from_secs(hold_secs)).await;
|
||||
patch_signal(&state, target_point.point_id, !trigger_value).await;
|
||||
|
||||
tracing::info!(
|
||||
"[chaos] unit={} eq={} role={} → RESTORED",
|
||||
"[chaos] unit={} eq={} role={} -> RESTORED",
|
||||
unit.code,
|
||||
eq.code,
|
||||
target_role
|
||||
|
|
@ -170,7 +170,7 @@ pub async fn patch_signal(state: &AppState, point_id: Uuid, value_on: bool) {
|
|||
source_id: Uuid::nil(),
|
||||
point_id,
|
||||
client_handle: 0,
|
||||
scan_mode: crate::model::ScanMode::Poll,
|
||||
scan_mode: plc_platform_core::model::ScanMode::Poll,
|
||||
timestamp: Some(chrono::Utc::now()),
|
||||
quality: PointQuality::Good,
|
||||
value: Some(value),
|
||||
|
|
@ -196,7 +196,7 @@ pub async fn patch_signal(state: &AppState, point_id: Uuid, value_on: bool) {
|
|||
.await;
|
||||
}
|
||||
|
||||
// ── Minimal XorShift64 PRNG (no external crate needed) ────────────────────────
|
||||
// Minimal XorShift64 PRNG (no external crate needed).
|
||||
|
||||
fn seed_rng() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
|
|
@ -117,7 +117,7 @@ pub async fn validate_manual_control(
|
|||
|
||||
drop(monitor_guard);
|
||||
|
||||
// Runtime state checks — block commands if unit is locked
|
||||
// Runtime state checks; block commands if the unit is locked.
|
||||
if let Some(unit_id) = equipment.unit_id {
|
||||
if let Some(runtime) = state.control_runtime.get(unit_id).await {
|
||||
if runtime.auto_enabled {
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
use std::collections::HashMap;
|
||||
use plc_platform_core::event::EventEnvelope;
|
||||
use tokio::sync::mpsc;
|
||||
use uuid::Uuid;
|
||||
use plc_platform_core::model::EventRecord;
|
||||
|
||||
const CONTROL_EVENT_CHANNEL_CAPACITY: usize = 1024;
|
||||
const TELEMETRY_EVENT_CHANNEL_CAPACITY: usize = 4096;
|
||||
|
|
@ -141,6 +143,15 @@ impl EventManager {
|
|||
}
|
||||
}
|
||||
|
||||
impl plc_platform_core::connection::PointEventSink for EventManager {
|
||||
fn send_point_new_value(
|
||||
&self,
|
||||
payload: plc_platform_core::telemetry::PointNewValue,
|
||||
) -> Result<(), String> {
|
||||
self.send(AppEvent::PointNewValue(payload))
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_control_event(
|
||||
event: AppEvent,
|
||||
pool: &sqlx::PgPool,
|
||||
|
|
@ -254,7 +265,7 @@ async fn handle_control_event(
|
|||
tracing::info!("REM recovered for unit {}", unit_id);
|
||||
}
|
||||
AppEvent::UnitStateChanged { unit_id, from_state, to_state } => {
|
||||
tracing::info!("Unit {} state: {} → {}", unit_id, from_state, to_state);
|
||||
tracing::info!("Unit {} state: {} -> {}", unit_id, from_state, to_state);
|
||||
}
|
||||
AppEvent::PointNewValue(_) => {
|
||||
tracing::warn!("PointNewValue routed to control worker unexpectedly");
|
||||
|
|
@ -303,7 +314,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"source.created", "info",
|
||||
None, None, Some(*source_id),
|
||||
format!("数据源【{}】已创建", name),
|
||||
format!("Source {} created", name),
|
||||
serde_json::json!({ "source_id": source_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -312,14 +323,14 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"source.updated", "info",
|
||||
None, None, Some(*source_id),
|
||||
format!("数据源【{}】已更新", name),
|
||||
format!("Source {} updated", name),
|
||||
serde_json::json!({ "source_id": source_id }),
|
||||
))
|
||||
}
|
||||
AppEvent::SourceDelete { source_id, source_name } => Some((
|
||||
"source.deleted", "warn",
|
||||
None, None, None,
|
||||
format!("数据源【{}】已删除", source_name),
|
||||
format!("Source {} deleted", source_name),
|
||||
serde_json::json!({ "source_id": source_id }),
|
||||
)),
|
||||
AppEvent::PointCreateBatch { source_id, point_ids } => {
|
||||
|
|
@ -327,7 +338,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"point.batch_created", "info",
|
||||
None, None, Some(*source_id),
|
||||
format!("批量创建 {} 个测点(数据源:{})", point_ids.len(), name),
|
||||
format!("Created {} points for source {}", point_ids.len(), name),
|
||||
serde_json::json!({ "source_id": source_id, "point_ids": point_ids }),
|
||||
))
|
||||
}
|
||||
|
|
@ -336,7 +347,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"point.batch_deleted", "warn",
|
||||
None, None, Some(*source_id),
|
||||
format!("批量删除 {} 个测点(数据源:{})", point_ids.len(), name),
|
||||
format!("Deleted {} points for source {}", point_ids.len(), name),
|
||||
serde_json::json!({ "source_id": source_id, "point_ids": point_ids }),
|
||||
))
|
||||
}
|
||||
|
|
@ -345,7 +356,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"equipment.start_command_sent", "info",
|
||||
*unit_id, Some(*equipment_id), None,
|
||||
format!("已发送启动指令(设备:{})", code),
|
||||
format!("Start command sent to equipment {}", code),
|
||||
serde_json::json!({
|
||||
"equipment_id": equipment_id,
|
||||
"unit_id": unit_id,
|
||||
|
|
@ -358,7 +369,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"equipment.stop_command_sent", "info",
|
||||
*unit_id, Some(*equipment_id), None,
|
||||
format!("已发送停止指令(设备:{})", code),
|
||||
format!("Stop command sent to equipment {}", code),
|
||||
serde_json::json!({
|
||||
"equipment_id": equipment_id,
|
||||
"unit_id": unit_id,
|
||||
|
|
@ -371,7 +382,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.auto_control_started", "info",
|
||||
Some(*unit_id), None, None,
|
||||
format!("已启动自动控制(单元:{})", code),
|
||||
format!("Auto control started for unit {}", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -380,7 +391,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.auto_control_stopped", "info",
|
||||
Some(*unit_id), None, None,
|
||||
format!("已停止自动控制(单元:{})", code),
|
||||
format!("Auto control stopped for unit {}", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -390,7 +401,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.fault_locked", "error",
|
||||
Some(*unit_id), Some(*equipment_id), None,
|
||||
format!("单元【{}】发生故障锁定,触发设备:{}", unit_code, eq_code),
|
||||
format!("Fault locked for unit {} by equipment {}", unit_code, eq_code),
|
||||
serde_json::json!({ "unit_id": unit_id, "equipment_id": equipment_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -399,7 +410,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.fault_acked", "info",
|
||||
Some(*unit_id), None, None,
|
||||
format!("单元【{}】故障已人工确认", code),
|
||||
format!("Fault acknowledged for unit {}", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -408,7 +419,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.comm_locked", "warn",
|
||||
Some(*unit_id), None, None,
|
||||
format!("单元【{}】通讯中断", code),
|
||||
format!("Communication locked for unit {}", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -417,7 +428,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.comm_recovered", "info",
|
||||
Some(*unit_id), None, None,
|
||||
format!("单元【{}】通讯恢复", code),
|
||||
format!("Communication recovered for unit {}", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -427,7 +438,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.rem_local", "warn",
|
||||
Some(*unit_id), Some(*equipment_id), None,
|
||||
format!("单元【{}】切换为本地控制,触发设备:{},自动控制已停止", unit_code, eq_code),
|
||||
format!("Unit {} switched to local control via equipment {}", unit_code, eq_code),
|
||||
serde_json::json!({ "unit_id": unit_id, "equipment_id": equipment_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -436,7 +447,7 @@ async fn persist_event_if_needed(
|
|||
Some((
|
||||
"unit.rem_recovered", "warn",
|
||||
Some(*unit_id), None, None,
|
||||
format!("单元【{}】已切换回远程控制,自动控制需手动重新启动", code),
|
||||
format!("Unit {} returned to remote control; auto control requires manual restart", code),
|
||||
serde_json::json!({ "unit_id": unit_id }),
|
||||
))
|
||||
}
|
||||
|
|
@ -447,21 +458,22 @@ async fn persist_event_if_needed(
|
|||
let Some((event_type, level, unit_id, equipment_id, source_id, message, payload)) = record else {
|
||||
return;
|
||||
};
|
||||
let envelope = EventEnvelope::new(event_type, payload);
|
||||
|
||||
let inserted = sqlx::query_as::<_, crate::model::EventRecord>(
|
||||
let inserted = sqlx::query_as::<_, EventRecord>(
|
||||
r#"
|
||||
INSERT INTO event (event_type, level, unit_id, equipment_id, source_id, message, payload)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING *
|
||||
"#,
|
||||
)
|
||||
.bind(event_type)
|
||||
.bind(envelope.event_type)
|
||||
.bind(level)
|
||||
.bind(unit_id as Option<Uuid>)
|
||||
.bind(equipment_id as Option<Uuid>)
|
||||
.bind(source_id)
|
||||
.bind(message)
|
||||
.bind(sqlx::types::Json(payload))
|
||||
.bind(sqlx::types::Json(envelope.payload))
|
||||
.fetch_one(pool)
|
||||
.await;
|
||||
|
||||
|
|
@ -496,7 +508,7 @@ async fn process_point_new_value(
|
|||
.and_then(|s| s.client_handle_map.get(&client_handle).copied())
|
||||
};
|
||||
if let Some(point_id) = point_id {
|
||||
// 从缓存中读取旧值
|
||||
// Read the previous value from the in-memory cache.
|
||||
let (old_value, old_timestamp, value_changed) = {
|
||||
let monitor_data = connection_manager.get_point_monitor_data_read_guard().await;
|
||||
let old_monitor_info = monitor_data.get(&point_id);
|
||||
|
|
@ -1,7 +1,9 @@
|
|||
pub mod control;
|
||||
pub mod control;
|
||||
pub mod doc;
|
||||
pub mod equipment;
|
||||
pub mod log;
|
||||
pub mod log {
|
||||
pub use plc_platform_core::handler::log::*;
|
||||
}
|
||||
pub mod page;
|
||||
pub mod point;
|
||||
pub mod source;
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use axum::{
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
|
|
@ -50,14 +50,14 @@ pub struct GetUnitListQuery {
|
|||
#[derive(serde::Serialize)]
|
||||
pub struct UnitEquipmentItem {
|
||||
#[serde(flatten)]
|
||||
pub equipment: crate::model::Equipment,
|
||||
pub equipment: plc_platform_core::model::Equipment,
|
||||
pub role_points: Vec<crate::handler::equipment::SignalRolePoint>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
pub struct UnitWithRuntime {
|
||||
#[serde(flatten)]
|
||||
pub unit: crate::model::ControlUnit,
|
||||
pub unit: plc_platform_core::model::ControlUnit,
|
||||
pub runtime: Option<crate::control::runtime::UnitRuntime>,
|
||||
pub equipments: Vec<UnitEquipmentItem>,
|
||||
}
|
||||
|
|
@ -250,21 +250,21 @@ pub async fn get_unit(
|
|||
#[derive(serde::Serialize)]
|
||||
pub struct PointDetail {
|
||||
#[serde(flatten)]
|
||||
pub point: crate::model::Point,
|
||||
pub point: plc_platform_core::model::Point,
|
||||
pub point_monitor: Option<crate::telemetry::PointMonitorInfo>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
pub struct EquipmentDetail {
|
||||
#[serde(flatten)]
|
||||
pub equipment: crate::model::Equipment,
|
||||
pub equipment: plc_platform_core::model::Equipment,
|
||||
pub points: Vec<PointDetail>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
pub struct UnitDetail {
|
||||
#[serde(flatten)]
|
||||
pub unit: crate::model::ControlUnit,
|
||||
pub unit: plc_platform_core::model::ControlUnit,
|
||||
pub runtime: Option<crate::control::runtime::UnitRuntime>,
|
||||
pub equipments: Vec<EquipmentDetail>,
|
||||
}
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
use axum::response::IntoResponse;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
|
||||
pub async fn get_api_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("docs/api-feeder.md").await
|
||||
}
|
||||
|
||||
pub async fn get_readme_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("README.md").await
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use axum::{
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::StatusCode,
|
||||
response::IntoResponse,
|
||||
|
|
@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
|
|||
use uuid::Uuid;
|
||||
use validator::Validate;
|
||||
|
||||
use crate::util::{
|
||||
use plc_platform_core::util::{
|
||||
pagination::{PaginatedResponse, PaginationParams},
|
||||
response::ApiErr,
|
||||
};
|
||||
|
|
@ -44,7 +44,7 @@ pub struct SignalRolePoint {
|
|||
#[derive(Serialize)]
|
||||
pub struct EquipmentListItem {
|
||||
#[serde(flatten)]
|
||||
pub equipment: crate::model::Equipment,
|
||||
pub equipment: plc_platform_core::model::Equipment,
|
||||
pub point_count: i64,
|
||||
pub role_points: Vec<SignalRolePoint>,
|
||||
}
|
||||
|
|
@ -88,11 +88,13 @@ pub async fn get_equipment_list(
|
|||
|
||||
let data = items
|
||||
.into_iter()
|
||||
.map(|item| EquipmentListItem {
|
||||
role_points: role_points_map
|
||||
.remove(&item.equipment.id)
|
||||
.unwrap_or_default(),
|
||||
..item
|
||||
.map(|item| {
|
||||
let equipment_id = item.equipment.id;
|
||||
EquipmentListItem {
|
||||
equipment: item.equipment,
|
||||
point_count: item.point_count,
|
||||
role_points: role_points_map.remove(&equipment_id).unwrap_or_default(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
|
|
@ -329,3 +331,5 @@ pub async fn delete_equipment(
|
|||
|
||||
Ok(StatusCode::NO_CONTENT)
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -1,12 +1,12 @@
|
|||
use axum::{Json, extract::{Path, Query, State}, http::StatusCode, response::IntoResponse};
|
||||
use axum::{Json, extract::{Path, Query, State}, http::StatusCode, response::IntoResponse};
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
use sqlx::types::Json as SqlxJson;
|
||||
use uuid::Uuid;
|
||||
use validator::Validate;
|
||||
|
||||
use crate::model::Page;
|
||||
use crate::util::response::ApiErr;
|
||||
use plc_platform_core::model::Page;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
use crate::AppState;
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
|
|
@ -11,15 +11,15 @@ use std::collections::{HashMap, HashSet};
|
|||
use uuid::Uuid;
|
||||
use validator::Validate;
|
||||
|
||||
use crate::util::{
|
||||
use plc_platform_core::util::{
|
||||
pagination::{PaginatedResponse, PaginationParams},
|
||||
response::ApiErr,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
model::{Node, Point},
|
||||
AppState,
|
||||
};
|
||||
use plc_platform_core::model::{Node, Point};
|
||||
|
||||
async fn notify_units(
|
||||
state: &AppState,
|
||||
|
|
@ -56,7 +56,7 @@ pub struct GetPointHistoryQuery {
|
|||
|
||||
#[derive(Serialize)]
|
||||
pub struct PointHistoryItem {
|
||||
#[serde(serialize_with = "crate::util::datetime::option_utc_to_local_str")]
|
||||
#[serde(serialize_with = "plc_platform_core::util::datetime::option_utc_to_local_str")]
|
||||
pub timestamp: Option<chrono::DateTime<chrono::Utc>>,
|
||||
pub quality: crate::telemetry::PointQuality,
|
||||
pub value: Option<crate::telemetry::DataValue>,
|
||||
|
|
@ -71,10 +71,10 @@ pub async fn get_point_list(
|
|||
query.validate()?;
|
||||
let pool = &state.pool;
|
||||
|
||||
// 获取总数
|
||||
// Count total rows.
|
||||
let total = crate::service::get_points_count(pool, query.source_id, query.equipment_id).await?;
|
||||
|
||||
// 获取分页数据
|
||||
// Load current page rows.
|
||||
let points = crate::service::get_points_paginated(
|
||||
pool,
|
||||
query.source_id,
|
||||
|
|
@ -690,3 +690,4 @@ fn monitor_value_to_number(item: &crate::telemetry::PointMonitorInfo) -> Option<
|
|||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use axum::{Json, extract::{Path, State}, http::StatusCode, response::IntoResponse};
|
||||
use axum::{Json, extract::{Path, State}, http::StatusCode, response::IntoResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
|
@ -11,13 +11,14 @@ use opcua::types::ReferenceTypeId;
|
|||
use opcua::client::Session;
|
||||
use std::collections::{HashMap, VecDeque};
|
||||
|
||||
use crate::util::response::ApiErr;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
|
||||
use crate::{AppState, model::{Node, Source}};
|
||||
use anyhow::{Context};
|
||||
use plc_platform_core::model::{Node, Source};
|
||||
use crate::AppState;
|
||||
use sqlx::QueryBuilder;
|
||||
|
||||
// 树节点结构体
|
||||
// 鏍戣妭鐐圭粨鏋勪綋
|
||||
#[derive(Debug, Serialize, Clone)]
|
||||
pub struct TreeNode {
|
||||
pub id: Uuid,
|
||||
|
|
@ -54,14 +55,14 @@ impl TreeNode {
|
|||
}
|
||||
|
||||
|
||||
// 带连接状态的Source响应结构体
|
||||
// 甯﹁繛鎺ョ姸鎬佺殑Source鍝嶅簲缁撴瀯浣?
|
||||
#[derive(Debug, Serialize, Clone)]
|
||||
pub struct SourceWithStatus {
|
||||
#[serde(flatten)]
|
||||
pub source: SourcePublic,
|
||||
pub is_connected: bool,
|
||||
pub last_error: Option<String>,
|
||||
#[serde(serialize_with = "crate::util::datetime::option_utc_to_local_str")]
|
||||
#[serde(serialize_with = "plc_platform_core::util::datetime::option_utc_to_local_str")]
|
||||
pub last_time: Option<DateTime<Utc>>,
|
||||
}
|
||||
|
||||
|
|
@ -74,9 +75,9 @@ pub struct SourcePublic {
|
|||
pub security_policy: Option<String>,
|
||||
pub security_mode: Option<String>,
|
||||
pub enabled: bool,
|
||||
#[serde(serialize_with = "crate::util::datetime::utc_to_local_str")]
|
||||
#[serde(serialize_with = "plc_platform_core::util::datetime::utc_to_local_str")]
|
||||
pub created_at: DateTime<Utc>,
|
||||
#[serde(serialize_with = "crate::util::datetime::utc_to_local_str")]
|
||||
#[serde(serialize_with = "plc_platform_core::util::datetime::utc_to_local_str")]
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
|
|
@ -100,14 +101,14 @@ pub async fn get_source_list(State(state): State<AppState>) -> Result<impl IntoR
|
|||
let pool = &state.pool;
|
||||
let sources: Vec<Source> = crate::service::get_all_enabled_sources(pool).await?;
|
||||
|
||||
// 获取所有连接状态
|
||||
// 鑾峰彇鎵€鏈夎繛鎺ョ姸鎬?
|
||||
let status_map: std::collections::HashMap<Uuid, (bool, Option<String>, Option<DateTime<Utc>>)> =
|
||||
state.connection_manager.get_all_status().await
|
||||
.into_iter()
|
||||
.map(|(source_id, s)| (source_id, (s.is_connected, s.last_error, Some(s.last_time))))
|
||||
.collect();
|
||||
|
||||
// 组合Source和连接状态
|
||||
// 缁勫悎Source鍜岃繛鎺ョ姸鎬?
|
||||
let sources_with_status: Vec<SourceWithStatus> = sources
|
||||
.into_iter()
|
||||
.map(|source| {
|
||||
|
|
@ -133,7 +134,7 @@ pub async fn get_node_tree(
|
|||
) -> Result<impl IntoResponse, ApiErr> {
|
||||
let pool = &state.pool;
|
||||
|
||||
// 查询所有属于该source的节点
|
||||
// 鏌ヨ鎵€鏈夊睘浜庤source鐨勮妭鐐?
|
||||
let nodes: Vec<Node> = sqlx::query_as::<_, Node>(
|
||||
r#"SELECT * FROM node WHERE source_id = $1 ORDER BY created_at"#,
|
||||
)
|
||||
|
|
@ -141,7 +142,7 @@ pub async fn get_node_tree(
|
|||
.fetch_all(pool)
|
||||
.await?;
|
||||
|
||||
// 构建节点树
|
||||
// 鏋勫缓鑺傜偣鏍?
|
||||
let tree = build_node_tree(nodes);
|
||||
|
||||
Ok(Json(tree))
|
||||
|
|
@ -152,7 +153,7 @@ fn build_node_tree(nodes: Vec<Node>) -> Vec<TreeNode> {
|
|||
let mut children_map: HashMap<Uuid, Vec<Uuid>> = HashMap::new();
|
||||
let mut roots: Vec<Uuid> = Vec::new();
|
||||
|
||||
// ① 转换 + 记录 parent 关系
|
||||
// 鈶?杞崲 + 璁板綍 parent 鍏崇郴
|
||||
for node in nodes {
|
||||
let tree_node = TreeNode::from_node(node);
|
||||
let id = tree_node.id;
|
||||
|
|
@ -166,7 +167,7 @@ fn build_node_tree(nodes: Vec<Node>) -> Vec<TreeNode> {
|
|||
node_map.insert(id, tree_node);
|
||||
}
|
||||
|
||||
// ② 递归构建
|
||||
// 鈶?閫掑綊鏋勫缓
|
||||
fn attach_children(
|
||||
id: Uuid,
|
||||
node_map: &mut HashMap<Uuid, TreeNode>,
|
||||
|
|
@ -185,7 +186,7 @@ fn build_node_tree(nodes: Vec<Node>) -> Vec<TreeNode> {
|
|||
Some(node)
|
||||
}
|
||||
|
||||
// ③ 生成最终树
|
||||
// 鈶?鐢熸垚鏈€缁堟爲
|
||||
roots
|
||||
.into_iter()
|
||||
.filter_map(|rid| attach_children(rid, &mut node_map, &children_map))
|
||||
|
|
@ -221,11 +222,11 @@ pub async fn create_source(
|
|||
.bind(&payload.name)
|
||||
.bind(&payload.endpoint)
|
||||
.bind(payload.enabled)
|
||||
.bind("opcua") //默认opcua协议
|
||||
.bind("opcua") //榛樿opcua鍗忚
|
||||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
// 触发 SourceCreate 事件
|
||||
// 瑙﹀彂 SourceCreate 浜嬩欢
|
||||
let _ = state.event_manager.send(crate::event::AppEvent::SourceCreate { source_id: new_id });
|
||||
|
||||
Ok((StatusCode::CREATED, Json(CreateSourceRes { id: new_id })))
|
||||
|
|
@ -323,7 +324,7 @@ pub async fn delete_source(
|
|||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
// 触发 SourceDelete 事件
|
||||
// 瑙﹀彂 SourceDelete 浜嬩欢
|
||||
let _ = state.event_manager.send(crate::event::AppEvent::SourceDelete { source_id, source_name });
|
||||
|
||||
Ok(StatusCode::NO_CONTENT)
|
||||
|
|
@ -363,7 +364,7 @@ pub async fn browse_and_save_nodes(
|
|||
|
||||
let pool = &state.pool;
|
||||
|
||||
// 确认 source 存在
|
||||
// 纭 source 瀛樺湪
|
||||
sqlx::query("SELECT 1 FROM source WHERE id = $1")
|
||||
.bind(source_id)
|
||||
.fetch_one(pool)
|
||||
|
|
@ -374,11 +375,11 @@ pub async fn browse_and_save_nodes(
|
|||
.await
|
||||
.ok_or_else(|| anyhow::anyhow!("Source not connected"))?;
|
||||
|
||||
// 读取 namespace 映射
|
||||
// 璇诲彇 namespace 鏄犲皠
|
||||
let namespace_map = load_namespace_map(&session).await
|
||||
.context("Failed to load namespace map")?;
|
||||
|
||||
// 开启事务(整次浏览一个事务)
|
||||
// 寮€鍚簨鍔★紙鏁存娴忚涓€涓簨鍔★級
|
||||
let mut tx = pool.begin().await
|
||||
.context("Failed to begin transaction")?;
|
||||
|
||||
|
|
@ -411,7 +412,7 @@ pub async fn browse_and_save_nodes(
|
|||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
// 浏览单个节点(含 continuation)
|
||||
// 娴忚鍗曚釜鑺傜偣锛堝惈 continuation锛?
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
async fn browse_single_node(
|
||||
|
|
@ -468,7 +469,7 @@ async fn browse_single_node(
|
|||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
// 处理单个 Reference(核心优化版)
|
||||
// 澶勭悊鍗曚釜 Reference锛堟牳蹇冧紭鍖栫増锛?
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
async fn process_reference(
|
||||
|
|
@ -484,7 +485,7 @@ async fn process_reference(
|
|||
let node_id_obj = &ref_desc.node_id.node_id;
|
||||
let node_id_str = node_id_obj.to_string();
|
||||
|
||||
// 内存去重
|
||||
// 鍐呭瓨鍘婚噸
|
||||
if processed_nodes.contains_key(&node_id_str) {
|
||||
return Ok(());
|
||||
}
|
||||
|
|
@ -567,7 +568,7 @@ async fn process_reference(
|
|||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
// 解析 NodeId
|
||||
// 瑙f瀽 NodeId
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
fn parse_node_id(node_id: &NodeId) -> (Option<u16>, Option<String>, String) {
|
||||
|
|
@ -585,13 +586,13 @@ fn parse_node_id(node_id: &NodeId) -> (Option<u16>, Option<String>, String) {
|
|||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////
|
||||
// 读取 NamespaceArray
|
||||
// 璇诲彇 NamespaceArray
|
||||
////////////////////////////////////////////////////////////////
|
||||
|
||||
async fn load_namespace_map(
|
||||
session: &Session,
|
||||
) -> anyhow::Result<HashMap<i32, String>> {
|
||||
// 读取命名空间数组节点
|
||||
// 璇诲彇鍛藉悕绌洪棿鏁扮粍鑺傜偣
|
||||
let ns_node = NodeId::new(0, 2255);
|
||||
let read_request = ReadValueId {
|
||||
node_id: ns_node,
|
||||
|
|
@ -600,11 +601,11 @@ async fn load_namespace_map(
|
|||
data_encoding: Default::default(),
|
||||
};
|
||||
|
||||
// 执行读取操作
|
||||
// 鎵ц璇诲彇鎿嶄綔
|
||||
let result = session.read(&[read_request], TimestampsToReturn::Neither, 0f64).await
|
||||
.context("Failed to read namespace map")?;
|
||||
|
||||
// 解析并构建命名空间映射
|
||||
// 瑙f瀽骞舵瀯寤哄懡鍚嶇┖闂存槧灏?
|
||||
let mut map = HashMap::new();
|
||||
if let Some(value) = &result[0].value {
|
||||
if let Variant::Array(array) = value {
|
||||
|
|
@ -622,3 +623,4 @@ async fn load_namespace_map(
|
|||
}
|
||||
|
||||
|
||||
|
||||
|
|
@ -3,10 +3,13 @@ use serde::Deserialize;
|
|||
use uuid::Uuid;
|
||||
use validator::Validate;
|
||||
|
||||
use crate::util::{response::ApiErr, pagination::{PaginatedResponse, PaginationParams}};
|
||||
use plc_platform_core::util::{
|
||||
pagination::{PaginatedResponse, PaginationParams},
|
||||
response::ApiErr,
|
||||
};
|
||||
use crate::{AppState};
|
||||
|
||||
/// 获取所有标签
|
||||
/// List all tags.
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct GetTagListQuery {
|
||||
#[serde(flatten)]
|
||||
|
|
@ -20,10 +23,10 @@ pub async fn get_tag_list(
|
|||
query.validate()?;
|
||||
let pool = &state.pool;
|
||||
|
||||
// 获取总数
|
||||
// Count total rows.
|
||||
let total = crate::service::get_tags_count(pool).await?;
|
||||
|
||||
// 获取分页数据
|
||||
// Load current page rows.
|
||||
let tags = crate::service::get_tags_paginated(
|
||||
pool,
|
||||
query.pagination.page_size,
|
||||
|
|
@ -35,7 +38,7 @@ pub async fn get_tag_list(
|
|||
Ok(Json(response))
|
||||
}
|
||||
|
||||
/// 获取标签下的点位信息
|
||||
/// List points under a tag.
|
||||
pub async fn get_tag_points(
|
||||
State(state): State<AppState>,
|
||||
Path(tag_id): Path<Uuid>,
|
||||
|
|
@ -60,7 +63,7 @@ pub struct UpdateTagReq {
|
|||
pub point_ids: Option<Vec<Uuid>>,
|
||||
}
|
||||
|
||||
/// 创建标签
|
||||
/// Create a tag.
|
||||
pub async fn create_tag(
|
||||
State(state): State<AppState>,
|
||||
Json(payload): Json<CreateTagReq>,
|
||||
|
|
@ -81,7 +84,7 @@ pub async fn create_tag(
|
|||
}))))
|
||||
}
|
||||
|
||||
/// 更新标签
|
||||
/// Update a tag.
|
||||
pub async fn update_tag(
|
||||
State(state): State<AppState>,
|
||||
Path(tag_id): Path<Uuid>,
|
||||
|
|
@ -89,7 +92,7 @@ pub async fn update_tag(
|
|||
) -> Result<impl IntoResponse, ApiErr> {
|
||||
payload.validate()?;
|
||||
|
||||
// 检查标签是否存在
|
||||
// Ensure the target tag exists.
|
||||
let exists = crate::service::get_tag_by_id(&state.pool, tag_id).await?;
|
||||
if exists.is_none() {
|
||||
return Err(ApiErr::NotFound("Tag not found".to_string(), None));
|
||||
|
|
@ -108,7 +111,7 @@ pub async fn update_tag(
|
|||
})))
|
||||
}
|
||||
|
||||
/// 删除标签
|
||||
/// Delete a tag.
|
||||
pub async fn delete_tag(
|
||||
State(state): State<AppState>,
|
||||
Path(tag_id): Path<Uuid>,
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
pub mod app;
|
||||
pub mod config;
|
||||
pub mod control;
|
||||
pub mod event;
|
||||
pub mod handler;
|
||||
pub mod middleware;
|
||||
pub mod router;
|
||||
pub mod websocket;
|
||||
|
||||
pub mod connection {
|
||||
pub use plc_platform_core::connection::*;
|
||||
}
|
||||
|
||||
pub mod db {
|
||||
pub use plc_platform_core::db::*;
|
||||
}
|
||||
|
||||
pub mod service {
|
||||
pub use plc_platform_core::service::*;
|
||||
}
|
||||
|
||||
pub mod telemetry {
|
||||
pub use plc_platform_core::telemetry::*;
|
||||
}
|
||||
|
||||
pub mod util {
|
||||
pub use plc_platform_core::util::*;
|
||||
}
|
||||
|
||||
pub use app::{run, AppState, test_state};
|
||||
pub use router::build_router;
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
#![cfg_attr(all(windows, not(debug_assertions)), windows_subsystem = "windows")]
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
app_feeder_distributor::run().await;
|
||||
}
|
||||
|
|
@ -10,9 +10,9 @@ pub async fn simple_logger(
|
|||
req: Request<Body>,
|
||||
next: Next,
|
||||
) -> Response {
|
||||
// 直接获取字符串引用,不用克隆
|
||||
// Borrow the path string directly; no clone needed.
|
||||
let method = req.method().to_string();
|
||||
let uri = req.uri().to_string(); // Uri 的 to_string() 创建新字符串
|
||||
let uri = req.uri().to_string(); // `Uri::to_string()` allocates the owned string once.
|
||||
|
||||
let start = Instant::now();
|
||||
let res = next.run(req).await;
|
||||
|
|
@ -0,0 +1,188 @@
|
|||
use axum::{
|
||||
extract::Request,
|
||||
middleware::Next,
|
||||
response::Response,
|
||||
routing::{get, post, put},
|
||||
Router,
|
||||
};
|
||||
use tower_http::cors::{Any, CorsLayer};
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
use crate::{handler, middleware::simple_logger, websocket, AppState};
|
||||
|
||||
async fn no_cache(req: Request, next: Next) -> Response {
|
||||
let mut response = next.run(req).await;
|
||||
response.headers_mut().insert(
|
||||
axum::http::header::CACHE_CONTROL,
|
||||
axum::http::HeaderValue::from_static("no-store"),
|
||||
);
|
||||
response
|
||||
}
|
||||
|
||||
pub fn build_router(state: AppState) -> Router {
|
||||
let all_route = Router::new()
|
||||
.route(
|
||||
"/api/source",
|
||||
get(handler::source::get_source_list).post(handler::source::create_source),
|
||||
)
|
||||
.route(
|
||||
"/api/source/{source_id}",
|
||||
axum::routing::delete(handler::source::delete_source)
|
||||
.put(handler::source::update_source),
|
||||
)
|
||||
.route(
|
||||
"/api/source/{source_id}/reconnect",
|
||||
axum::routing::post(handler::source::reconnect_source),
|
||||
)
|
||||
.route(
|
||||
"/api/source/{source_id}/browse",
|
||||
axum::routing::post(handler::source::browse_and_save_nodes),
|
||||
)
|
||||
.route(
|
||||
"/api/source/{source_id}/node-tree",
|
||||
get(handler::source::get_node_tree),
|
||||
)
|
||||
.route("/api/point", get(handler::point::get_point_list))
|
||||
.route(
|
||||
"/api/point/value/batch",
|
||||
axum::routing::post(handler::point::batch_set_point_value),
|
||||
)
|
||||
.route(
|
||||
"/api/point/batch",
|
||||
axum::routing::post(handler::point::batch_create_points)
|
||||
.delete(handler::point::batch_delete_points),
|
||||
)
|
||||
.route(
|
||||
"/api/point/{point_id}/history",
|
||||
get(handler::point::get_point_history),
|
||||
)
|
||||
.route(
|
||||
"/api/point/{point_id}",
|
||||
get(handler::point::get_point)
|
||||
.put(handler::point::update_point)
|
||||
.delete(handler::point::delete_point),
|
||||
)
|
||||
.route(
|
||||
"/api/point/batch/set-tags",
|
||||
put(handler::point::batch_set_point_tags),
|
||||
)
|
||||
.route(
|
||||
"/api/point/batch/set-equipment",
|
||||
put(handler::point::batch_set_point_equipment),
|
||||
)
|
||||
.route(
|
||||
"/api/equipment",
|
||||
get(handler::equipment::get_equipment_list).post(handler::equipment::create_equipment),
|
||||
)
|
||||
.route(
|
||||
"/api/equipment/{equipment_id}",
|
||||
get(handler::equipment::get_equipment)
|
||||
.put(handler::equipment::update_equipment)
|
||||
.delete(handler::equipment::delete_equipment),
|
||||
)
|
||||
.route(
|
||||
"/api/equipment/batch/set-unit",
|
||||
put(handler::equipment::batch_set_equipment_unit),
|
||||
)
|
||||
.route(
|
||||
"/api/equipment/{equipment_id}/points",
|
||||
get(handler::equipment::get_equipment_points),
|
||||
)
|
||||
.route(
|
||||
"/api/unit",
|
||||
get(handler::control::get_unit_list).post(handler::control::create_unit),
|
||||
)
|
||||
.route(
|
||||
"/api/unit/{unit_id}",
|
||||
get(handler::control::get_unit)
|
||||
.put(handler::control::update_unit)
|
||||
.delete(handler::control::delete_unit),
|
||||
)
|
||||
.route("/api/event", get(handler::control::get_event_list))
|
||||
.route(
|
||||
"/api/control/equipment/{equipment_id}/start",
|
||||
post(handler::control::start_equipment),
|
||||
)
|
||||
.route(
|
||||
"/api/control/equipment/{equipment_id}/stop",
|
||||
post(handler::control::stop_equipment),
|
||||
)
|
||||
.route(
|
||||
"/api/control/unit/{unit_id}/start-auto",
|
||||
post(handler::control::start_auto_unit),
|
||||
)
|
||||
.route(
|
||||
"/api/control/unit/{unit_id}/stop-auto",
|
||||
post(handler::control::stop_auto_unit),
|
||||
)
|
||||
.route(
|
||||
"/api/control/unit/batch-start-auto",
|
||||
post(handler::control::batch_start_auto),
|
||||
)
|
||||
.route(
|
||||
"/api/control/unit/batch-stop-auto",
|
||||
post(handler::control::batch_stop_auto),
|
||||
)
|
||||
.route(
|
||||
"/api/control/unit/{unit_id}/ack-fault",
|
||||
post(handler::control::ack_fault_unit),
|
||||
)
|
||||
.route(
|
||||
"/api/unit/{unit_id}/runtime",
|
||||
get(handler::control::get_unit_runtime),
|
||||
)
|
||||
.route(
|
||||
"/api/unit/{unit_id}/detail",
|
||||
get(handler::control::get_unit_detail),
|
||||
)
|
||||
.route(
|
||||
"/api/tag",
|
||||
get(handler::tag::get_tag_list).post(handler::tag::create_tag),
|
||||
)
|
||||
.route(
|
||||
"/api/tag/{tag_id}",
|
||||
get(handler::tag::get_tag_points)
|
||||
.put(handler::tag::update_tag)
|
||||
.delete(handler::tag::delete_tag),
|
||||
)
|
||||
.route(
|
||||
"/api/page",
|
||||
get(handler::page::get_page_list).post(handler::page::create_page),
|
||||
)
|
||||
.route(
|
||||
"/api/page/{page_id}",
|
||||
get(handler::page::get_page)
|
||||
.put(handler::page::update_page)
|
||||
.delete(handler::page::delete_page),
|
||||
)
|
||||
.route("/api/logs", get(handler::log::get_logs))
|
||||
.route("/api/logs/stream", get(handler::log::stream_logs))
|
||||
.route("/api/docs/api-md", get(handler::doc::get_api_md))
|
||||
.route("/api/docs/readme-md", get(handler::doc::get_readme_md));
|
||||
|
||||
Router::new()
|
||||
.merge(all_route)
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(
|
||||
ServeDir::new("web/feeder")
|
||||
.append_index_html_on_directories(true)
|
||||
.fallback(ServeDir::new("web/core")),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
.route("/ws/public", get(websocket::public_websocket_handler))
|
||||
.route(
|
||||
"/ws/client/{client_id}",
|
||||
get(websocket::client_websocket_handler),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(simple_logger))
|
||||
.layer(
|
||||
CorsLayer::new()
|
||||
.allow_origin(Any)
|
||||
.allow_methods(Any)
|
||||
.allow_headers(Any),
|
||||
)
|
||||
.with_state(state)
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use axum::{
|
||||
use axum::{
|
||||
extract::{
|
||||
ws::{Message, WebSocket, WebSocketUpgrade},
|
||||
Path, State,
|
||||
|
|
@ -17,7 +17,7 @@ use uuid::Uuid;
|
|||
pub enum WsMessage {
|
||||
PointNewValue(crate::telemetry::PointMonitorInfo),
|
||||
PointSetValueBatchResult(crate::connection::BatchSetPointValueRes),
|
||||
EventCreated(crate::model::EventRecord),
|
||||
EventCreated(plc_platform_core::model::EventRecord),
|
||||
UnitRuntimeChanged(crate::control::runtime::UnitRuntime),
|
||||
}
|
||||
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
use axum::{
|
||||
body::Body,
|
||||
http::{Method, Request, StatusCode},
|
||||
};
|
||||
use tower::ServiceExt;
|
||||
|
||||
#[tokio::test]
|
||||
async fn feeder_router_exposes_unit_api_route() {
|
||||
let app = app_feeder_distributor::build_router(app_feeder_distributor::app::test_state());
|
||||
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.method(Method::DELETE)
|
||||
.uri("/api/unit")
|
||||
.body(Body::empty())
|
||||
.expect("request should build"),
|
||||
)
|
||||
.await
|
||||
.expect("router should answer request");
|
||||
|
||||
assert_eq!(response.status(), StatusCode::METHOD_NOT_ALLOWED);
|
||||
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
[package]
|
||||
name = "app_operation_system"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
plc_platform_core = { path = "../plc_platform_core" }
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
tracing = "0.1"
|
||||
dotenv = "0.15"
|
||||
|
||||
[dev-dependencies]
|
||||
tower = { version = "0.5", features = ["util"] }
|
||||
|
||||
[[bin]]
|
||||
name = "app_operation_system"
|
||||
path = "src/main.rs"
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
use crate::router::build_router;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AppConfig {
|
||||
pub server_host: String,
|
||||
pub server_port: u16,
|
||||
}
|
||||
|
||||
impl AppConfig {
|
||||
pub fn from_env() -> Self {
|
||||
Self {
|
||||
server_host: std::env::var("OPS_SERVER_HOST")
|
||||
.unwrap_or_else(|_| "127.0.0.1".to_string()),
|
||||
server_port: std::env::var("OPS_SERVER_PORT")
|
||||
.ok()
|
||||
.and_then(|value| value.parse().ok())
|
||||
.unwrap_or(3100),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AppState {
|
||||
pub app_name: &'static str,
|
||||
pub config: AppConfig,
|
||||
}
|
||||
|
||||
pub async fn run() {
|
||||
dotenv::dotenv().ok();
|
||||
plc_platform_core::util::log::init_logger();
|
||||
let _platform = plc_platform_core::bootstrap::bootstrap_platform();
|
||||
let _single_instance =
|
||||
match plc_platform_core::util::single_instance::try_acquire("PLCControl.OperationSystem") {
|
||||
Ok(guard) => guard,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
|
||||
tracing::warn!("Another operation-system instance is already running");
|
||||
return;
|
||||
}
|
||||
Err(err) => {
|
||||
tracing::error!("Failed to initialize single instance guard: {}", err);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let state = AppState {
|
||||
app_name: "operation-system",
|
||||
config: AppConfig::from_env(),
|
||||
};
|
||||
let app = build_router(state.clone());
|
||||
let addr = format!("{}:{}", state.config.server_host, state.config.server_port);
|
||||
tracing::info!("Starting operation-system server at http://{}", addr);
|
||||
let listener = tokio::net::TcpListener::bind(&addr)
|
||||
.await
|
||||
.expect("operation-system listener should bind");
|
||||
|
||||
axum::serve(listener, app)
|
||||
.await
|
||||
.expect("operation-system server should run");
|
||||
}
|
||||
|
||||
pub fn test_state() -> AppState {
|
||||
AppState {
|
||||
app_name: "operation-system",
|
||||
config: AppConfig {
|
||||
server_host: "127.0.0.1".to_string(),
|
||||
server_port: 0,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1 @@
|
|||
pub mod doc;
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
use axum::response::IntoResponse;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
|
||||
pub async fn get_api_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("docs/api-ops.md").await
|
||||
}
|
||||
|
||||
pub async fn get_readme_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("README.md").await
|
||||
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
pub mod app;
|
||||
pub mod handler;
|
||||
pub mod router;
|
||||
|
||||
pub use app::{run, test_state, AppState};
|
||||
pub use router::build_router;
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
#![cfg_attr(all(windows, not(debug_assertions)), windows_subsystem = "windows")]
|
||||
|
||||
// Binary entry point: spin up the Tokio runtime and delegate to the
// library's async runner.
#[tokio::main]
async fn main() {
    app_operation_system::run().await;
}
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
use axum::{extract::State, routing::get, Router};
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
use crate::app::AppState;
|
||||
|
||||
/// Middleware that stamps every response with `Cache-Control: no-store`
/// so browsers always fetch fresh UI assets instead of caching them.
async fn no_cache(
    req: axum::extract::Request,
    next: axum::middleware::Next,
) -> axum::response::Response {
    let mut resp = next.run(req).await;
    let headers = resp.headers_mut();
    headers.insert(
        axum::http::header::CACHE_CONTROL,
        axum::http::HeaderValue::from_static("no-store"),
    );
    resp
}
|
||||
|
||||
pub fn build_router(state: AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/api/health", get(health_check))
|
||||
.route("/api/logs", get(plc_platform_core::handler::log::get_logs))
|
||||
.route("/api/logs/stream", get(plc_platform_core::handler::log::stream_logs))
|
||||
.route("/api/docs/api-md", get(crate::handler::doc::get_api_md))
|
||||
.route("/api/docs/readme-md", get(crate::handler::doc::get_readme_md))
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(
|
||||
ServeDir::new("web/ops")
|
||||
.append_index_html_on_directories(true)
|
||||
.fallback(ServeDir::new("web/core")),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
async fn health_check(State(state): State<AppState>) -> String {
|
||||
format!("{}:ok", state.app_name)
|
||||
}
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
use axum::{
|
||||
body::Body,
|
||||
http::{Method, Request, StatusCode},
|
||||
};
|
||||
use tower::ServiceExt;
|
||||
|
||||
#[tokio::test]
|
||||
async fn operation_system_router_exposes_health_endpoint() {
|
||||
let app = app_operation_system::build_router(app_operation_system::app::test_state());
|
||||
|
||||
let response = app
|
||||
.oneshot(
|
||||
Request::builder()
|
||||
.method(Method::GET)
|
||||
.uri("/api/health")
|
||||
.body(Body::empty())
|
||||
.expect("request should build"),
|
||||
)
|
||||
.await
|
||||
.expect("router should answer request");
|
||||
|
||||
assert_eq!(response.status(), StatusCode::OK);
|
||||
}
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
[package]
|
||||
name = "plc_platform_core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "chrono", "uuid", "json"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_with = "3.0"
|
||||
async-stream = "0.3"
|
||||
chrono = "0.4"
|
||||
time = "0.3"
|
||||
uuid = { version = "1.21", features = ["serde", "v4"] }
|
||||
async-opcua = { version = "0.18", features = ["client"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter", "time", "json"] }
|
||||
tracing-appender = "0.2"
|
||||
dotenv = "0.15"
|
||||
validator = { version = "0.20", features = ["derive"] }
|
||||
anyhow = "1.0"
|
||||
fs2 = "0.4"
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
use crate::platform_context::PlatformContext;
|
||||
|
||||
/// Create the shared platform context used during application startup.
///
/// Currently a thin constructor; "bootstrap" names the configuration
/// profile the context is created from.
pub fn bootstrap_platform() -> PlatformContext {
    PlatformContext::new("bootstrap")
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use chrono::{DateTime, Utc};
|
||||
use chrono::{DateTime, Utc};
|
||||
use opcua::{
|
||||
client::{ClientBuilder, IdentityToken, Session},
|
||||
crypto::SecurityPolicy,
|
||||
|
|
@ -18,14 +18,18 @@ use std::{
|
|||
use tokio::task::JoinHandle;
|
||||
use tokio::sync::RwLock;
|
||||
use uuid::Uuid;
|
||||
use crate::model::{PointSubscriptionInfo, ScanMode};
|
||||
|
||||
use crate::{
|
||||
model::{PointSubscriptionInfo, ScanMode},
|
||||
telemetry::PointMonitorInfo,
|
||||
};
|
||||
|
||||
const DEFAULT_POINT_RING_BUFFER_LEN: usize = 1000;
|
||||
|
||||
pub trait PointEventSink: Send + Sync {
|
||||
fn send_point_new_value(&self, payload: crate::telemetry::PointNewValue) -> Result<(), String>;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
|
||||
pub struct SetPointValueReqItem {
|
||||
pub point_id: Uuid,
|
||||
|
|
@ -95,11 +99,11 @@ pub struct ConnectionStatus {
|
|||
pub next_client_handle: u32,
|
||||
pub client_handle_map: HashMap<u32, Uuid>, // client_handle -> point_id
|
||||
pub monitored_item_map: HashMap<Uuid, u32>, // point_id -> monitored_item_id
|
||||
pub poll_points: Arc<Vec<PollPointInfo>>, // 正在轮询的点集合
|
||||
poll_handle: Option<JoinHandle<()>>, // 统一的轮询任务句柄
|
||||
heartbeat_handle: Option<JoinHandle<()>>, // 心跳任务句柄
|
||||
event_loop_handle: Option<JoinHandle<opcua::types::StatusCode>>, // event_loop 任务句柄
|
||||
event_loop_monitor_handle: Option<JoinHandle<()>>, // event_loop 监控任务句柄
|
||||
pub poll_points: Arc<Vec<PollPointInfo>>, // 姝e湪杞鐨勭偣闆嗗悎
|
||||
poll_handle: Option<JoinHandle<()>>, // 缁熶竴鐨勮疆璇换鍔″彞鏌?
|
||||
heartbeat_handle: Option<JoinHandle<()>>, // 蹇冭烦浠诲姟鍙ユ焺
|
||||
event_loop_handle: Option<JoinHandle<opcua::types::StatusCode>>, // event_loop 浠诲姟鍙ユ焺
|
||||
event_loop_monitor_handle: Option<JoinHandle<()>>, // event_loop 鐩戞帶浠诲姟鍙ユ焺
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
|
@ -108,7 +112,7 @@ pub struct ConnectionManager {
|
|||
point_monitor_data: Arc<RwLock<HashMap<Uuid, PointMonitorInfo>>>,
|
||||
point_history_data: Arc<RwLock<HashMap<Uuid, VecDeque<PointMonitorInfo>>>>,
|
||||
point_write_target_cache: Arc<RwLock<HashMap<Uuid, PointWriteTarget>>>,
|
||||
event_manager: Option<std::sync::Arc<crate::event::EventManager>>,
|
||||
point_event_sink: Option<Arc<dyn PointEventSink>>,
|
||||
pool: Option<Arc<sqlx::PgPool>>,
|
||||
reconnect_tx: Option<tokio::sync::mpsc::UnboundedSender<Uuid>>,
|
||||
reconnect_rx: Arc<std::sync::Mutex<Option<tokio::sync::mpsc::UnboundedReceiver<Uuid>>>>,
|
||||
|
|
@ -169,7 +173,7 @@ impl ConnectionManager {
|
|||
point_monitor_data: Arc::new(RwLock::new(HashMap::new())),
|
||||
point_history_data: Arc::new(RwLock::new(HashMap::new())),
|
||||
point_write_target_cache: Arc::new(RwLock::new(HashMap::new())),
|
||||
event_manager: None,
|
||||
point_event_sink: None,
|
||||
pool: None,
|
||||
reconnect_tx: Some(reconnect_tx),
|
||||
reconnect_rx: Arc::new(std::sync::Mutex::new(Some(reconnect_rx))),
|
||||
|
|
@ -178,8 +182,8 @@ impl ConnectionManager {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn set_event_manager(&mut self, event_manager: std::sync::Arc<crate::event::EventManager>) {
|
||||
self.event_manager = Some(event_manager);
|
||||
pub fn set_event_manager(&mut self, point_event_sink: Arc<dyn PointEventSink>) {
|
||||
self.point_event_sink = Some(point_event_sink);
|
||||
}
|
||||
|
||||
pub fn set_pool(&mut self, pool: Arc<sqlx::PgPool>) {
|
||||
|
|
@ -188,7 +192,7 @@ impl ConnectionManager {
|
|||
|
||||
pub fn set_pool_and_start_reconnect_task(&mut self, pool: Arc<sqlx::PgPool>) {
|
||||
self.pool = Some(pool.clone());
|
||||
// 将 self 转换为不可变引用以调用 start_reconnect_task
|
||||
// 灏?self 杞崲涓轰笉鍙彉寮曠敤浠ヨ皟鐢?start_reconnect_task
|
||||
let manager = self.clone();
|
||||
manager.start_reconnect_task();
|
||||
}
|
||||
|
|
@ -205,7 +209,7 @@ impl ConnectionManager {
|
|||
let manager = self.clone();
|
||||
let pool = manager.pool.clone();
|
||||
tokio::spawn(async move {
|
||||
// 获取重连通道的接收端
|
||||
// 鑾峰彇閲嶈繛閫氶亾鐨勬帴鏀剁
|
||||
let mut reconnect_rx = manager.get_reconnect_rx().expect("Failed to get reconnect receiver");
|
||||
|
||||
while let Some(source_id) = reconnect_rx.recv().await {
|
||||
|
|
@ -328,18 +332,18 @@ impl ConnectionManager {
|
|||
let manager = self.clone();
|
||||
|
||||
let handle = tokio::spawn(async move {
|
||||
let mut ticker = tokio::time::interval(Duration::from_secs(8)); // 每8秒检测一次心跳
|
||||
let mut ticker = tokio::time::interval(Duration::from_secs(8)); // 姣?绉掓娴嬩竴娆″績璺?
|
||||
ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
loop {
|
||||
ticker.tick().await;
|
||||
|
||||
// 检查session是否有效
|
||||
// 妫€鏌ession鏄惁鏈夋晥
|
||||
let session = manager.get_session(source_id).await;
|
||||
|
||||
let (session_valid, subscription_valid) = if let Some(session) = session {
|
||||
// 尝试读取当前时间来验证连接
|
||||
let node_id = NodeId::new(0, 2258); // ServerCurrentTime节点
|
||||
// 灏濊瘯璇诲彇褰撳墠鏃堕棿鏉ラ獙璇佽繛鎺?
|
||||
let node_id = NodeId::new(0, 2258); // ServerCurrentTime鑺傜偣
|
||||
let read_request = ReadValueId {
|
||||
node_id,
|
||||
attribute_id: AttributeId::Value as u32,
|
||||
|
|
@ -352,8 +356,8 @@ impl ConnectionManager {
|
|||
Err(_) => false,
|
||||
};
|
||||
|
||||
// 检查订阅状态 - 仅当有 subscription_id 时才检查
|
||||
// 这里使用 set_publishing_mode 直接让服务器校验订阅 ID,避免仅靠会话读请求误判为健康。
|
||||
// 妫€鏌ヨ闃呯姸鎬?- 浠呭綋鏈?subscription_id 鏃舵墠妫€鏌?
|
||||
// 杩欓噷浣跨敤 set_publishing_mode 鐩存帴璁╂湇鍔″櫒鏍¢獙璁㈤槄 ID锛岄伩鍏嶄粎闈犱細璇濊璇锋眰璇垽涓哄仴搴枫€?
|
||||
let subscription_id = {
|
||||
let status = manager.status.read().await;
|
||||
status.get(&source_id).and_then(|conn_status| conn_status.subscription_id)
|
||||
|
|
@ -364,7 +368,7 @@ impl ConnectionManager {
|
|||
Err(_) => false,
|
||||
}
|
||||
} else {
|
||||
// 没有 subscription_id 时,认为订阅状态有效(不需要检查)
|
||||
// 娌℃湁 subscription_id 鏃讹紝璁や负璁㈤槄鐘舵€佹湁鏁堬紙涓嶉渶瑕佹鏌ワ級
|
||||
true
|
||||
};
|
||||
|
||||
|
|
@ -374,7 +378,7 @@ impl ConnectionManager {
|
|||
};
|
||||
|
||||
if !session_valid || !subscription_valid {
|
||||
// 检查是否已经在重连中
|
||||
// 妫€鏌ユ槸鍚﹀凡缁忓湪閲嶈繛涓?
|
||||
let mut reconnecting = manager.reconnecting.write().await;
|
||||
if !reconnecting.contains(&source_id) {
|
||||
reconnecting.insert(source_id);
|
||||
|
|
@ -392,7 +396,7 @@ impl ConnectionManager {
|
|||
source_id
|
||||
);
|
||||
|
||||
// 通过通道发送重连请求
|
||||
// 閫氳繃閫氶亾鍙戦€侀噸杩炶姹?
|
||||
if let Some(tx) = manager.reconnect_tx.as_ref() {
|
||||
if let Err(e) = tx.send(source_id) {
|
||||
tracing::error!("Failed to send reconnect request for source {}: {}", source_id, e);
|
||||
|
|
@ -407,7 +411,7 @@ impl ConnectionManager {
|
|||
}
|
||||
});
|
||||
|
||||
// 保存心跳任务句柄
|
||||
// 淇濆瓨蹇冭烦浠诲姟鍙ユ焺
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
conn_status.heartbeat_handle = Some(handle);
|
||||
|
|
@ -415,15 +419,15 @@ impl ConnectionManager {
|
|||
}
|
||||
|
||||
async fn start_unified_poll_task(&self, source_id: Uuid, session: Arc<Session>) {
|
||||
let event_manager = match self.event_manager.clone() {
|
||||
Some(em) => em,
|
||||
let point_event_sink = match self.point_event_sink.clone() {
|
||||
Some(sink) => sink,
|
||||
None => {
|
||||
tracing::warn!("Event manager is not initialized, cannot start unified poll task");
|
||||
tracing::warn!("Point event sink is not initialized, cannot start unified poll task");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// 停止旧的轮询任务
|
||||
// 鍋滄鏃х殑杞浠诲姟
|
||||
{
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
|
|
@ -438,10 +442,10 @@ impl ConnectionManager {
|
|||
source_id
|
||||
);
|
||||
|
||||
// 克隆 status 引用,以便在异步任务中使用
|
||||
// 鍏嬮殕 status 寮曠敤锛屼互渚垮湪寮傛浠诲姟涓娇鐢?
|
||||
let status_ref = self.status.clone();
|
||||
|
||||
// 启动新的轮询任务
|
||||
// 鍚姩鏂扮殑杞浠诲姟
|
||||
let handle = tokio::spawn(async move {
|
||||
let mut ticker = tokio::time::interval(Duration::from_secs(1));
|
||||
ticker.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
|
@ -449,7 +453,7 @@ impl ConnectionManager {
|
|||
loop {
|
||||
ticker.tick().await;
|
||||
|
||||
// 在任务内部获取轮询点列表
|
||||
// 鍦ㄤ换鍔″唴閮ㄨ幏鍙栬疆璇㈢偣鍒楄〃
|
||||
let poll_points = {
|
||||
let status = status_ref.read().await;
|
||||
status.get(&source_id)
|
||||
|
|
@ -461,7 +465,7 @@ impl ConnectionManager {
|
|||
continue;
|
||||
}
|
||||
|
||||
// 构建批量读取请求
|
||||
// 鏋勫缓鎵归噺璇诲彇璇锋眰
|
||||
let read_requests: Vec<ReadValueId> = poll_points
|
||||
.iter()
|
||||
.filter_map(|p| {
|
||||
|
|
@ -478,7 +482,7 @@ impl ConnectionManager {
|
|||
continue;
|
||||
}
|
||||
|
||||
// 执行批量读取
|
||||
// 鎵ц鎵归噺璇诲彇
|
||||
match session.read(&read_requests, TimestampsToReturn::Both, 0f64).await {
|
||||
Ok(results) => {
|
||||
for (poll_point, result) in poll_points.iter().zip(results.iter()) {
|
||||
|
|
@ -492,7 +496,7 @@ impl ConnectionManager {
|
|||
.map(crate::telemetry::PointQuality::from_status_code)
|
||||
.unwrap_or(crate::telemetry::PointQuality::Good);
|
||||
|
||||
let _ = event_manager.send(crate::event::AppEvent::PointNewValue(
|
||||
let _ = point_event_sink.send_point_new_value(
|
||||
crate::telemetry::PointNewValue {
|
||||
source_id,
|
||||
point_id: Some(poll_point.point_id),
|
||||
|
|
@ -505,7 +509,7 @@ impl ConnectionManager {
|
|||
timestamp: Some(Utc::now()),
|
||||
scan_mode: ScanMode::Poll,
|
||||
},
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
|
|
@ -519,14 +523,14 @@ impl ConnectionManager {
|
|||
}
|
||||
});
|
||||
|
||||
// 保存轮询任务句柄
|
||||
// 淇濆瓨杞浠诲姟鍙ユ焺
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
conn_status.poll_handle = Some(handle);
|
||||
}
|
||||
}
|
||||
|
||||
// 将点添加到轮询列表
|
||||
// 灏嗙偣娣诲姞鍒拌疆璇㈠垪琛?
|
||||
async fn add_points_to_poll_list(
|
||||
&self,
|
||||
source_id: Uuid,
|
||||
|
|
@ -534,12 +538,12 @@ impl ConnectionManager {
|
|||
) -> usize {
|
||||
let mut started = 0usize;
|
||||
|
||||
// 添加新的轮询点
|
||||
// 娣诲姞鏂扮殑杞鐐?
|
||||
{
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
for point in points {
|
||||
// 检查点是否已经在轮询列表中
|
||||
// 妫€鏌ョ偣鏄惁宸茬粡鍦ㄨ疆璇㈠垪琛ㄤ腑
|
||||
if !conn_status.poll_points.iter().any(|p| p.point_id == point.point_id) {
|
||||
Arc::make_mut(&mut conn_status.poll_points).push(PollPointInfo {
|
||||
point_id: point.point_id,
|
||||
|
|
@ -662,7 +666,7 @@ impl ConnectionManager {
|
|||
|
||||
let event_loop_handle = event_loop.spawn();
|
||||
|
||||
// 添加监控任务来捕获 event_loop 结束事件
|
||||
// 娣诲姞鐩戞帶浠诲姟鏉ユ崟鑾?event_loop 缁撴潫浜嬩欢
|
||||
let manager = self.clone();
|
||||
let source_id_copy = source_id;
|
||||
let event_loop_monitor_handle = tokio::spawn(async move {
|
||||
|
|
@ -683,7 +687,7 @@ impl ConnectionManager {
|
|||
}
|
||||
}
|
||||
|
||||
// 统一触发重连
|
||||
// 缁熶竴瑙﹀彂閲嶈繛
|
||||
if let Some(tx) = manager.reconnect_tx.as_ref() {
|
||||
let _ = tx.send(source_id_copy);
|
||||
}
|
||||
|
|
@ -710,16 +714,16 @@ impl ConnectionManager {
|
|||
poll_points: Arc::new(Vec::new()),
|
||||
poll_handle: None,
|
||||
heartbeat_handle: None,
|
||||
event_loop_handle: None, // event_loop_handle 已被移动到监控任务中
|
||||
event_loop_handle: None, // event_loop_handle 宸茶绉诲姩鍒扮洃鎺т换鍔′腑
|
||||
event_loop_monitor_handle: Some(event_loop_monitor_handle),
|
||||
},
|
||||
);
|
||||
drop(status); // 显式释放锁,在调用 start_unified_poll_task 之前
|
||||
drop(status); // 鏄惧紡閲婃斁閿侊紝鍦ㄨ皟鐢?start_unified_poll_task 涔嬪墠
|
||||
|
||||
// 启动统一的轮询任务
|
||||
// 鍚姩缁熶竴鐨勮疆璇换鍔?
|
||||
self.start_unified_poll_task(source_id, session).await;
|
||||
|
||||
// 启动心跳任务
|
||||
// 鍚姩蹇冭烦浠诲姟
|
||||
self.start_heartbeat_task(source_id).await;
|
||||
|
||||
tracing::info!("Successfully connected to source {}", source_id);
|
||||
|
|
@ -750,19 +754,19 @@ impl ConnectionManager {
|
|||
pub async fn reconnect(&self, pool: &sqlx::PgPool, source_id: Uuid) -> Result<(), String> {
|
||||
tracing::info!("Reconnecting to source {}", source_id);
|
||||
|
||||
// 先断开连接
|
||||
// 鍏堟柇寮€杩炴帴
|
||||
if let Err(e) = self.disconnect(source_id).await {
|
||||
tracing::error!("Failed to disconnect source {}: {}", source_id, e);
|
||||
}
|
||||
|
||||
// 再重新连接
|
||||
// 鍐嶉噸鏂拌繛鎺?
|
||||
let result = self.connect_from_source(pool, source_id).await;
|
||||
if result.is_ok() {
|
||||
let mut attempts = self.reconnect_attempts.write().await;
|
||||
attempts.remove(&source_id);
|
||||
}
|
||||
|
||||
// 无论成功还是失败都清除重连标记,以便心跳检测到问题后可以再次触发重连
|
||||
// 鏃犺鎴愬姛杩樻槸澶辫触閮芥竻闄ら噸杩炴爣璁帮紝浠ヤ究蹇冭烦妫€娴嬪埌闂鍚庡彲浠ュ啀娆¤Е鍙戦噸杩?
|
||||
let mut reconnecting = self.reconnecting.write().await;
|
||||
reconnecting.remove(&source_id);
|
||||
|
||||
|
|
@ -770,7 +774,7 @@ impl ConnectionManager {
|
|||
}
|
||||
|
||||
pub async fn disconnect(&self, source_id: Uuid) -> Result<(), String> {
|
||||
// 停止轮询任务并清空轮询点列表
|
||||
// 鍋滄杞浠诲姟骞舵竻绌鸿疆璇㈢偣鍒楄〃
|
||||
{
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
|
|
@ -778,15 +782,15 @@ impl ConnectionManager {
|
|||
if let Some(handle) = conn_status.poll_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止心跳任务
|
||||
// 鍋滄蹇冭烦浠诲姟
|
||||
if let Some(handle) = conn_status.heartbeat_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止 event_loop 任务
|
||||
// 鍋滄 event_loop 浠诲姟
|
||||
if let Some(handle) = conn_status.event_loop_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止 event_loop 监控任务
|
||||
// 鍋滄 event_loop 鐩戞帶浠诲姟
|
||||
if let Some(handle) = conn_status.event_loop_monitor_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
|
|
@ -810,7 +814,7 @@ impl ConnectionManager {
|
|||
let source_ids: Vec<Uuid> = self.status.read().await.keys().copied().collect();
|
||||
|
||||
for source_id in source_ids {
|
||||
// 停止轮询任务并清空轮询点列表
|
||||
// 鍋滄杞浠诲姟骞舵竻绌鸿疆璇㈢偣鍒楄〃
|
||||
{
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
|
|
@ -818,15 +822,15 @@ impl ConnectionManager {
|
|||
if let Some(handle) = conn_status.poll_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止心跳任务
|
||||
// 鍋滄蹇冭烦浠诲姟
|
||||
if let Some(handle) = conn_status.heartbeat_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止 event_loop 任务
|
||||
// 鍋滄 event_loop 浠诲姟
|
||||
if let Some(handle) = conn_status.event_loop_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
// 停止 event_loop 监控任务
|
||||
// 鍋滄 event_loop 鐩戞帶浠诲姟
|
||||
if let Some(handle) = conn_status.event_loop_monitor_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
|
|
@ -1044,9 +1048,9 @@ impl ConnectionManager {
|
|||
}
|
||||
|
||||
// Emit local updates only when the full batch succeeds.
|
||||
if let Some(event_manager) = &self.event_manager {
|
||||
if let Some(point_event_sink) = &self.point_event_sink {
|
||||
for (source_id, point_id, variant) in success_events {
|
||||
if let Err(e) = event_manager.send(crate::event::AppEvent::PointNewValue(
|
||||
if let Err(e) = point_event_sink.send_point_new_value(
|
||||
crate::telemetry::PointNewValue {
|
||||
source_id,
|
||||
point_id: Some(point_id),
|
||||
|
|
@ -1057,9 +1061,9 @@ impl ConnectionManager {
|
|||
quality: crate::telemetry::PointQuality::Good,
|
||||
protocol: "opcua".to_string(),
|
||||
timestamp: Some(Utc::now()),
|
||||
scan_mode: crate::model::ScanMode::Poll,
|
||||
scan_mode: ScanMode::Poll,
|
||||
},
|
||||
)) {
|
||||
) {
|
||||
tracing::warn!(
|
||||
"Batch write succeeded but failed to dispatch point update for point {}: {}",
|
||||
point_id,
|
||||
|
|
@ -1185,8 +1189,8 @@ impl ConnectionManager {
|
|||
.map(crate::telemetry::PointQuality::from_status_code)
|
||||
.unwrap_or(crate::telemetry::PointQuality::Good);
|
||||
|
||||
if let Some(event_manager) = &data_manager.event_manager {
|
||||
let _ = event_manager.send(crate::event::AppEvent::PointNewValue(
|
||||
if let Some(point_event_sink) = &data_manager.point_event_sink {
|
||||
let _ = point_event_sink.send_point_new_value(
|
||||
crate::telemetry::PointNewValue {
|
||||
source_id: current_source_id,
|
||||
point_id: None,
|
||||
|
|
@ -1198,7 +1202,7 @@ impl ConnectionManager {
|
|||
protocol: "opcua".to_string(),
|
||||
timestamp: timex,
|
||||
scan_mode: ScanMode::Subscribe,
|
||||
}));
|
||||
});
|
||||
}
|
||||
},
|
||||
|_event_fields, _item| {},
|
||||
|
|
@ -1465,7 +1469,7 @@ impl ConnectionManager {
|
|||
}
|
||||
}
|
||||
|
||||
// 从轮询列表中移除传入的点,并记录移除的轮询点数量
|
||||
// 浠庤疆璇㈠垪琛ㄤ腑绉婚櫎浼犲叆鐨勭偣锛屽苟璁板綍绉婚櫎鐨勮疆璇㈢偣鏁伴噺
|
||||
let polling_removed_count = {
|
||||
let mut status = self.status.write().await;
|
||||
if let Some(conn_status) = status.get_mut(&source_id) {
|
||||
|
|
@ -1478,7 +1482,7 @@ impl ConnectionManager {
|
|||
}
|
||||
};
|
||||
|
||||
// 计算从订阅点和轮询点移除的总数
|
||||
// 璁$畻浠庤闃呯偣鍜岃疆璇㈢偣绉婚櫎鐨勬€绘暟
|
||||
let total_removed = removed_point_ids.len() + polling_removed_count;
|
||||
tracing::info!(
|
||||
"Unsubscribed {} points (subscription: {}, polling: {}) from source {}",
|
||||
|
|
@ -1494,3 +1498,10 @@ impl ConnectionManager {
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,13 +1,13 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use serde_json::json;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
connection::{BatchSetPointValueReq, ConnectionManager, SetPointValueReqItem},
|
||||
telemetry::ValueType,
|
||||
};
|
||||
use serde_json::json;
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Write a pulse (high → delay → low) to a command point.
|
||||
/// Returns Ok(()) on success, Err(msg) on any failure.
|
||||
pub async fn send_pulse_command(
|
||||
connection_manager: &Arc<ConnectionManager>,
|
||||
point_id: Uuid,
|
||||
|
|
@ -19,7 +19,10 @@ pub async fn send_pulse_command(
|
|||
|
||||
let high_result = connection_manager
|
||||
.write_point_values_batch(BatchSetPointValueReq {
|
||||
items: vec![SetPointValueReqItem { point_id, value: high }],
|
||||
items: vec![SetPointValueReqItem {
|
||||
point_id,
|
||||
value: high,
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
|
||||
|
|
@ -31,7 +34,10 @@ pub async fn send_pulse_command(
|
|||
|
||||
let low_result = connection_manager
|
||||
.write_point_values_batch(BatchSetPointValueReq {
|
||||
items: vec![SetPointValueReqItem { point_id, value: low }],
|
||||
items: vec![SetPointValueReqItem {
|
||||
point_id,
|
||||
value: low,
|
||||
}],
|
||||
})
|
||||
.await?;
|
||||
|
||||
|
|
@ -45,6 +51,12 @@ pub async fn send_pulse_command(
|
|||
fn pulse_value(high: bool, value_type: Option<&ValueType>) -> serde_json::Value {
|
||||
match value_type {
|
||||
Some(ValueType::Bool) => serde_json::Value::Bool(high),
|
||||
_ => if high { json!(1) } else { json!(0) },
|
||||
_ => {
|
||||
if high {
|
||||
json!(1)
|
||||
} else {
|
||||
json!(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
pub mod command;
|
||||
pub mod runtime;
|
||||
|
|
@ -19,13 +19,11 @@ pub struct UnitRuntime {
|
|||
pub state: UnitRuntimeState,
|
||||
pub auto_enabled: bool,
|
||||
pub accumulated_run_sec: i64,
|
||||
/// Snapshot updated only on state transitions; used for display to avoid mid-tick jitter.
|
||||
pub display_acc_sec: i64,
|
||||
pub fault_locked: bool,
|
||||
pub flt_active: bool,
|
||||
pub comm_locked: bool,
|
||||
pub manual_ack_required: bool,
|
||||
/// True when any equipment in the unit has REM=false (local mode) with good signal quality.
|
||||
pub rem_local: bool,
|
||||
}
|
||||
|
||||
|
|
@ -88,10 +86,9 @@ impl ControlRuntimeStore {
|
|||
self.inner.read().await.clone()
|
||||
}
|
||||
|
||||
/// Wake the engine task for a unit (e.g., when auto_enabled or fault_locked changes).
|
||||
pub async fn notify_unit(&self, unit_id: Uuid) {
|
||||
if let Some(n) = self.notifiers.read().await.get(&unit_id) {
|
||||
n.notify_one();
|
||||
if let Some(notify) = self.notifiers.read().await.get(&unit_id) {
|
||||
notify.notify_one();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Generic event wrapper: a string type tag plus an untyped JSON payload,
/// used when shuttling events across serialization boundaries.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct EventEnvelope {
    /// Discriminator naming the event kind carried in `payload`.
    pub event_type: String,
    /// Event body; its schema is determined by `event_type`.
    pub payload: Value,
}

impl EventEnvelope {
    /// Build an envelope, accepting anything convertible into a `String`
    /// for the type tag.
    pub fn new(event_type: impl Into<String>, payload: Value) -> Self {
        Self {
            event_type: event_type.into(),
            payload,
        }
    }
}
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
pub mod doc;
|
||||
pub mod log;
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
use axum::{
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
use crate::util::response::ApiErr;
|
||||
|
||||
pub async fn serve_markdown(path: &str) -> Result<impl IntoResponse, ApiErr> {
|
||||
let content = tokio::fs::read_to_string(path)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
tracing::error!("Failed to read {}: {}", path, err);
|
||||
ApiErr::NotFound(format!("{} not found", path), None)
|
||||
})?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("text/markdown; charset=utf-8"),
|
||||
);
|
||||
|
||||
Ok((StatusCode::OK, headers, content))
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use std::{
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
path::{Path, PathBuf},
|
||||
time::SystemTime,
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
pub mod bootstrap;
|
||||
pub mod connection;
|
||||
pub mod control;
|
||||
pub mod db;
|
||||
pub mod event;
|
||||
pub mod handler;
|
||||
pub mod model;
|
||||
pub mod platform_context;
|
||||
pub mod service;
|
||||
pub mod telemetry;
|
||||
pub mod util;
|
||||
pub mod websocket;
|
||||
|
||||
pub use event::EventEnvelope;
|
||||
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
/// Shared, cheaply clonable platform context handed to applications at
/// startup.
#[derive(Clone)]
pub struct PlatformContext {
    /// Name of the configuration profile this context was created from.
    pub config_name: Arc<str>,
}

impl PlatformContext {
    /// Build a context for the named configuration profile; accepts any
    /// type convertible into a shared string (e.g. `&str`, `String`).
    pub fn new(config_name: impl Into<Arc<str>>) -> Self {
        let config_name = config_name.into();
        Self { config_name }
    }
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use crate::model::{ControlUnit, EventRecord};
|
||||
use crate::model::{ControlUnit, EventRecord};
|
||||
use sqlx::{PgPool, QueryBuilder, Row};
|
||||
use uuid::Uuid;
|
||||
|
||||
|
|
@ -507,3 +507,4 @@ pub async fn get_equipment_role_points(
|
|||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
|
|
@ -1,10 +1,13 @@
|
|||
use crate::{
|
||||
handler::equipment::EquipmentListItem,
|
||||
model::{Equipment, Point},
|
||||
};
|
||||
use crate::model::{Equipment, Point};
|
||||
use sqlx::{query_as, PgPool, Row};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EquipmentListItem {
|
||||
pub equipment: Equipment,
|
||||
pub point_count: i64,
|
||||
}
|
||||
|
||||
/// ORDER BY expression used by equipment listing queries: sorts by the
/// code column of the `e`-aliased equipment table.
fn equipment_order_clause() -> &'static str {
    "e.code"
}
|
||||
|
|
@ -152,7 +155,6 @@ pub async fn get_equipment_paginated(
|
|||
updated_at: row.get("updated_at"),
|
||||
},
|
||||
point_count: row.get::<i64, _>("point_count"),
|
||||
role_points: vec![],
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
|
@ -307,3 +309,7 @@ mod tests {
|
|||
assert_eq!(equipment_order_clause(), "e.code");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use crate::model::{Point, PointSubscriptionInfo};
|
||||
use crate::model::{Point, PointSubscriptionInfo};
|
||||
use sqlx::{query_as, PgPool, Row};
|
||||
use std::collections::HashMap;
|
||||
|
||||
|
|
@ -283,3 +283,4 @@ pub async fn get_points_paginated(
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use crate::model::Source;
|
||||
use crate::model::Source;
|
||||
use sqlx::{query_as, PgPool};
|
||||
|
||||
pub async fn get_enabled_source(
|
||||
|
|
@ -16,3 +16,4 @@ pub async fn get_all_enabled_sources(pool: &PgPool) -> Result<Vec<Source>, sqlx:
|
|||
.fetch_all(pool)
|
||||
.await
|
||||
}
|
||||
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
use crate::model::{Point, Tag};
|
||||
use crate::model::{Point, Tag};
|
||||
use sqlx::{query_as, PgPool};
|
||||
|
||||
pub async fn get_tags_count(pool: &PgPool) -> Result<i64, sqlx::Error> {
|
||||
|
|
@ -182,3 +182,4 @@ pub async fn delete_tag(pool: &PgPool, tag_id: uuid::Uuid) -> Result<bool, sqlx:
|
|||
|
||||
Ok(result.rows_affected() > 0)
|
||||
}
|
||||
|
||||
|
|
@ -1,9 +1,8 @@
|
|||
use chrono::{DateTime, Utc};
|
||||
use chrono::{DateTime, Utc};
|
||||
use crate::model::ScanMode;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::model::ScanMode;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum ValueType {
|
||||
|
|
@ -152,3 +151,4 @@ pub fn opcua_variant_type(value: &opcua::types::Variant) -> ValueType {
|
|||
_ => ValueType::Text,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
use std::sync::OnceLock;
|
||||
use time::UtcOffset;
|
||||
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
|
||||
use tracing_appender::{rolling, non_blocking};
|
||||
use time::UtcOffset;
|
||||
|
||||
static LOG_GUARD: OnceLock<non_blocking::WorkerGuard> = OnceLock::new();
|
||||
|
||||
|
|
@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
|
|||
use serde_with::serde_as;
|
||||
use validator::Validate;
|
||||
|
||||
/// 分页响应结构
|
||||
/// Paginated API response payload.
|
||||
#[derive(Serialize)]
|
||||
pub struct PaginatedResponse<T> {
|
||||
pub data: Vec<T>,
|
||||
|
|
@ -13,7 +13,7 @@ pub struct PaginatedResponse<T> {
|
|||
}
|
||||
|
||||
impl<T> PaginatedResponse<T> {
|
||||
/// 创建分页响应
|
||||
/// Build a paginated response from rows and total count.
|
||||
pub fn new(data: Vec<T>, total: i64, page: u32, page_size: i32) -> Self {
|
||||
let total_pages = if page_size > 0 {
|
||||
((total as f64) / (page_size as f64)).ceil() as u32
|
||||
|
|
@ -31,7 +31,7 @@ impl<T> PaginatedResponse<T> {
|
|||
}
|
||||
}
|
||||
|
||||
/// 分页查询参数
|
||||
/// Common pagination query parameters.
|
||||
#[serde_as]
|
||||
#[derive(Debug, Deserialize, Validate)]
|
||||
pub struct PaginationParams {
|
||||
|
|
@ -54,7 +54,7 @@ fn default_page_size() -> i32 {
|
|||
}
|
||||
|
||||
impl PaginationParams {
|
||||
/// 计算偏移量
|
||||
/// Row offset derived from the current page and page size.
|
||||
pub fn offset(&self) -> u32 {
|
||||
(self.page - 1) * self.page_size.max(0) as u32
|
||||
}
|
||||
|
|
@ -80,7 +80,7 @@ impl From<SqlxError> for ApiErr {
|
|||
}
|
||||
SqlxError::Database(db_err) => {
|
||||
if db_err.code().as_deref() == Some("23505") {
|
||||
ApiErr::BadRequest("数据已存在".into(), None)
|
||||
ApiErr::BadRequest("data already exists".into(), None)
|
||||
} else {
|
||||
tracing::error!("Database error: {}", db_err);
|
||||
ApiErr::Internal("Database error".into(), None)
|
||||
|
|
@ -141,4 +141,3 @@ impl From<FormRejection> for ApiErr {
|
|||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -4,9 +4,9 @@ use validator::ValidationErrors;
|
|||
|
||||
impl From<ValidationErrors> for ApiErr {
|
||||
fn from(errors: ValidationErrors) -> Self {
|
||||
// 构建详细的错误信息
|
||||
// Build a detailed validation error message.
|
||||
let mut error_details = serde_json::Map::new();
|
||||
let mut first_error_msg = String::from("请求参数验证失败");
|
||||
let mut first_error_msg = String::from("鐠囬攱鐪伴崣鍌涙殶妤犲矁鐦夋径杈Е");
|
||||
|
||||
for (field, field_errors) in errors.field_errors() {
|
||||
let error_list: Vec<String> = field_errors
|
||||
|
|
@ -19,8 +19,8 @@ impl From<ValidationErrors> for ApiErr {
|
|||
.collect();
|
||||
error_details.insert(field.to_string(), json!(error_list));
|
||||
|
||||
// 获取第一个字段的第一个错误信息
|
||||
if first_error_msg == "请求参数验证失败" && !error_list.is_empty() {
|
||||
// Use the first available field error as the summary.
|
||||
if first_error_msg == "鐠囬攱鐪伴崣鍌涙殶妤犲矁鐦夋径杈Е" && !error_list.is_empty() {
|
||||
if let Some(msg) = field_errors[0].message.as_ref() {
|
||||
first_error_msg = format!("{}: {}", field, msg);
|
||||
} else {
|
||||
|
|
@ -0,0 +1,128 @@
|
|||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::{broadcast, RwLock};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
connection::{BatchSetPointValueReq, BatchSetPointValueRes},
|
||||
control::runtime::UnitRuntime,
|
||||
model::EventRecord,
|
||||
telemetry::PointMonitorInfo,
|
||||
};
|
||||
|
||||
/// Server-to-client WebSocket messages, serialized as tagged JSON:
/// `{"type": "...", "data": ...}` per the serde `tag`/`content` attributes.
///
/// NOTE(review): unlike `WsClientMessage` this enum has no
/// `rename_all = "snake_case"`, so outbound tags are PascalCase
/// (`PointNewValue`, ...) — confirm this asymmetry is intentional.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", content = "data")]
pub enum WsMessage {
    /// A monitored point produced a new value.
    PointNewValue(PointMonitorInfo),
    /// Result of a batched point-write request.
    PointSetValueBatchResult(BatchSetPointValueRes),
    /// A new `EventRecord` was created.
    EventCreated(EventRecord),
    /// A unit's runtime state changed.
    UnitRuntimeChanged(UnitRuntime),
}
|
||||
|
||||
/// Client-to-server WebSocket messages. Tags are snake_case
/// (`auth_write`, `point_set_value_batch`) via `rename_all`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", content = "data", rename_all = "snake_case")]
pub enum WsClientMessage {
    /// Request write authorization for this connection.
    AuthWrite(WsAuthWriteReq),
    /// Write a batch of point values.
    PointSetValueBatch(BatchSetPointValueReq),
}
|
||||
|
||||
/// Payload of the `auth_write` client message.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WsAuthWriteReq {
    // Credential presented by the client — presumably a shared write key;
    // verify the expected format against the websocket auth handler.
    pub key: String,
}
|
||||
|
||||
/// Registry of named broadcast rooms. `Clone` is shallow: all clones share
/// the same underlying map through the `Arc`.
#[derive(Clone)]
pub struct RoomManager {
    // room id -> broadcast sender; the async RwLock allows concurrent reads
    // while room creation/removal takes the write lock.
    rooms: Arc<RwLock<HashMap<String, broadcast::Sender<WsMessage>>>>,
}
|
||||
|
||||
impl RoomManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
rooms: Arc::new(RwLock::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_or_create_room(&self, room_id: &str) -> broadcast::Sender<WsMessage> {
|
||||
let mut rooms = self.rooms.write().await;
|
||||
|
||||
if let Some(sender) = rooms.get(room_id) {
|
||||
return sender.clone();
|
||||
}
|
||||
|
||||
let (sender, _) = broadcast::channel(100);
|
||||
rooms.insert(room_id.to_string(), sender.clone());
|
||||
tracing::info!("Created new room: {}", room_id);
|
||||
sender
|
||||
}
|
||||
|
||||
pub async fn get_room(&self, room_id: &str) -> Option<broadcast::Sender<WsMessage>> {
|
||||
let rooms = self.rooms.read().await;
|
||||
rooms.get(room_id).cloned()
|
||||
}
|
||||
|
||||
pub async fn remove_room_if_empty(&self, room_id: &str) {
|
||||
let mut rooms = self.rooms.write().await;
|
||||
let should_remove = rooms
|
||||
.get(room_id)
|
||||
.map(|sender| sender.receiver_count() == 0)
|
||||
.unwrap_or(false);
|
||||
|
||||
if should_remove {
|
||||
rooms.remove(room_id);
|
||||
tracing::info!("Removed empty room: {}", room_id);
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_to_room(&self, room_id: &str, message: WsMessage) -> Result<usize, String> {
|
||||
if let Some(sender) = self.get_room(room_id).await {
|
||||
match sender.send(message) {
|
||||
Ok(count) => Ok(count),
|
||||
Err(broadcast::error::SendError(_)) => Ok(0),
|
||||
}
|
||||
} else {
|
||||
Ok(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for RoomManager {
    /// Equivalent to `RoomManager::new`.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// Facade over a single shared `RoomManager`. The "public" room and the
/// per-client rooms (keyed by client UUID in `send_to_client`) live in the
/// same registry.
#[derive(Clone)]
pub struct WebSocketManager {
    public_room: Arc<RoomManager>,
}
|
||||
|
||||
impl WebSocketManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
public_room: Arc::new(RoomManager::new()),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_to_public(&self, message: WsMessage) -> Result<usize, String> {
|
||||
self.public_room.get_or_create_room("public").await;
|
||||
self.public_room.send_to_room("public", message).await
|
||||
}
|
||||
|
||||
pub async fn send_to_client(
|
||||
&self,
|
||||
client_id: Uuid,
|
||||
message: WsMessage,
|
||||
) -> Result<usize, String> {
|
||||
self.public_room
|
||||
.send_to_room(&client_id.to_string(), message)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for WebSocketManager {
    /// Equivalent to `WebSocketManager::new`.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
use plc_platform_core::bootstrap::bootstrap_platform;
|
||||
use plc_platform_core::platform_context::PlatformContext;
|
||||
|
||||
#[test]
fn platform_context_type_is_public() {
    // bootstrap_platform() must produce a context named "bootstrap".
    let context = bootstrap_platform();
    assert_eq!(context.config_name.as_ref(), "bootstrap");

    // Compile-time check: PlatformContext must be shareable across
    // tasks/threads and cheap to clone.
    fn assert_send_sync_clone<T: Send + Sync + Clone>() {}
    assert_send_sync_clone::<PlatformContext>();
}
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
use plc_platform_core::event::EventEnvelope;
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
fn namespaced_event_types_keep_their_prefix() {
    // EventEnvelope must store the namespaced event type verbatim and keep
    // the payload JSON untouched.
    let envelope = EventEnvelope::new(
        "feeder.auto_control_started",
        json!({"unit_id": "00000000-0000-0000-0000-000000000000"}),
    );

    assert!(envelope.event_type.starts_with("feeder."));
    assert_eq!(
        envelope.payload["unit_id"],
        "00000000-0000-0000-0000-000000000000"
    );
}
|
||||
|
||||
#[test]
fn event_namespaces_match_the_supported_apps() {
    // Representative event types, one per application namespace.
    let supported = [
        "platform.source_connected",
        "feeder.auto_control_started",
        "ops.unit_started",
    ];

    // Stronger than the bare `contains('.')` check: each name must split
    // into a namespace from the supported app set plus a non-empty event
    // name, which is what the test's name actually promises.
    let namespaces = ["platform", "feeder", "ops"];
    for name in supported {
        let (namespace, event) = name
            .split_once('.')
            .expect("event type must be namespaced as `<app>.<event>`");
        assert!(
            namespaces.contains(&namespace),
            "unknown namespace in {name}"
        );
        assert!(!event.is_empty(), "empty event name in {name}");
    }
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
use plc_platform_core::model::Equipment;
|
||||
|
||||
#[test]
fn equipment_type_is_public() {
    // Compile-time visibility check only: naming Option<Equipment> proves the
    // type is importable from the shared crate. No runtime behavior tested.
    let _equipment: Option<Equipment> = None;
}
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use plc_platform_core::control::runtime::{ControlRuntimeStore, UnitRuntimeState};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[tokio::test]
async fn runtime_store_exposes_shared_runtime_surface() {
    let unit_id = Uuid::new_v4();
    let store = ControlRuntimeStore::new();

    // A fresh runtime starts stopped with auto mode disabled, and the
    // state enum serializes as a lowercase string.
    let initial = store.get_or_init(unit_id).await;
    assert_eq!(initial.unit_id, unit_id);
    assert_eq!(initial.state, UnitRuntimeState::Stopped);
    assert!(!initial.auto_enabled);
    assert_eq!(
        serde_json::to_string(&UnitRuntimeState::Stopped).unwrap(),
        "\"stopped\""
    );

    // Spawn a waiter on the unit's notify handle before mutating the store.
    // NOTE(review): potentially racy — the spawned task may not yet be
    // awaiting notified() when notify_unit() fires below. This only passes
    // reliably if the store's notify mechanism stores a permit for late
    // waiters (tokio Notify::notify_one does; notify_waiters does not) —
    // confirm against ControlRuntimeStore's implementation.
    let notify = store.get_or_create_notify(unit_id).await;
    let waiter = tokio::spawn(async move {
        tokio::time::timeout(Duration::from_millis(50), notify.notified()).await
    });

    // Mutate the runtime and publish the change.
    let mut updated = initial.clone();
    updated.auto_enabled = true;
    updated.state = UnitRuntimeState::Running;
    store.upsert(updated.clone()).await;
    store.notify_unit(unit_id).await;

    // The waiter must wake within the timeout.
    let notified = waiter.await.unwrap();
    assert!(notified.is_ok(), "unit notifier should wake waiters");

    // The upserted state must be retrievable afterwards.
    let persisted = store.get(unit_id).await.expect("runtime should exist");
    assert_eq!(persisted.state, UnitRuntimeState::Running);
    assert!(persisted.auto_enabled);
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
use plc_platform_core::service::EquipmentRolePoint;
|
||||
|
||||
#[test]
fn service_type_is_public() {
    // Compile-time visibility check only: naming Option<EquipmentRolePoint>
    // proves the type is importable from the shared crate's service module.
    let _role_point: Option<EquipmentRolePoint> = None;
}
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
# 运转系统 API
|
||||
|
||||
## 健康检查
|
||||
|
||||
- `GET /api/health` — 返回应用名称和状态
|
||||
|
||||
## 日志
|
||||
|
||||
- `GET /api/logs` — 拉取日志内容
|
||||
- `GET /api/logs/stream` — SSE 增量推送
|
||||
|
||||
## 文档
|
||||
|
||||
- `GET /api/docs/api-md` — 获取 API 文档
|
||||
- `GET /api/docs/readme-md` — 获取 README
|
||||
|
|
@ -0,0 +1,887 @@
|
|||
# Dual App Shared Core Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Restructure the repository into a Cargo workspace with one shared Rust platform library and two app binaries, while preserving the current feeder/distributor behavior and preparing a second operation-system app.
|
||||
|
||||
**Architecture:** Keep the current feeder/distributor business logic as the first concrete app, extract stable platform modules into `plc_platform_core`, and add `app_operation_system` as a second binary with its own routes, state, and static assets. Store platform and business events in the same `event` table using namespaced `event_type` values.
|
||||
|
||||
**Tech Stack:** Rust 2021, Cargo workspace, Tokio, Axum, SQLx, async-opcua, WebSocket, existing web static assets
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
### New workspace-level files
|
||||
|
||||
- Create: `crates/plc_platform_core/Cargo.toml`
|
||||
- Create: `crates/plc_platform_core/src/lib.rs`
|
||||
- Create: `crates/plc_platform_core/src/bootstrap.rs`
|
||||
- Create: `crates/plc_platform_core/src/platform_context.rs`
|
||||
- Create: `crates/app_feeder_distributor/Cargo.toml`
|
||||
- Create: `crates/app_feeder_distributor/src/main.rs`
|
||||
- Create: `crates/app_feeder_distributor/src/app.rs`
|
||||
- Create: `crates/app_feeder_distributor/src/router.rs`
|
||||
- Create: `crates/app_operation_system/Cargo.toml`
|
||||
- Create: `crates/app_operation_system/src/main.rs`
|
||||
- Create: `crates/app_operation_system/src/app.rs`
|
||||
- Create: `crates/app_operation_system/src/router.rs`
|
||||
|
||||
### Existing files that move into the shared crate
|
||||
|
||||
- Modify then move: `src/config.rs`
|
||||
- Modify then move: `src/db.rs`
|
||||
- Modify then move: `src/model.rs`
|
||||
- Modify then move: `src/connection.rs`
|
||||
- Modify then move: `src/telemetry.rs`
|
||||
- Modify then move: `src/event.rs`
|
||||
- Modify then move: `src/websocket.rs`
|
||||
- Modify then move: `src/service.rs`
|
||||
- Modify then move: `src/service/*.rs`
|
||||
- Modify then move: `src/util.rs`
|
||||
- Modify then move: `src/util/*.rs`
|
||||
- Modify then move: `src/control/command.rs`
|
||||
- Modify then move: `src/control/runtime.rs`
|
||||
|
||||
### Existing files that stay in the feeder/distributor app
|
||||
|
||||
- Modify and keep: `src/control/engine.rs`
|
||||
- Modify and keep: `src/control/simulate.rs`
|
||||
- Modify and keep: `src/handler/control.rs`
|
||||
- Modify and keep: `web/**`
|
||||
|
||||
### New or modified tests
|
||||
|
||||
- Create: `crates/plc_platform_core/tests/bootstrap_smoke.rs`
|
||||
- Create: `crates/plc_platform_core/tests/event_namespace.rs`
|
||||
- Create: `crates/app_feeder_distributor/tests/router_smoke.rs`
|
||||
- Create: `crates/app_operation_system/tests/router_smoke.rs`
|
||||
|
||||
## Task 1: Convert The Repository Into A Workspace
|
||||
|
||||
**Files:**
|
||||
- Modify: `Cargo.toml`
|
||||
- Create: `crates/plc_platform_core/Cargo.toml`
|
||||
- Create: `crates/app_feeder_distributor/Cargo.toml`
|
||||
- Create: `crates/app_operation_system/Cargo.toml`
|
||||
|
||||
- [ ] **Step 1: Write the failing workspace metadata test via `cargo metadata`**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo metadata --no-deps
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL or output only one package instead of the three planned workspace members
|
||||
|
||||
- [ ] **Step 2: Replace the root `Cargo.toml` with workspace metadata**
|
||||
|
||||
```toml
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/plc_platform_core",
|
||||
"crates/app_feeder_distributor",
|
||||
"crates/app_operation_system",
|
||||
]
|
||||
resolver = "2"
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Add the shared crate manifest**
|
||||
|
||||
```toml
|
||||
[package]
|
||||
name = "plc_platform_core"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "postgres", "chrono", "uuid", "json"] }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_with = "3.0"
|
||||
async-stream = "0.3"
|
||||
chrono = "0.4"
|
||||
time = "0.3"
|
||||
uuid = { version = "1.21", features = ["serde", "v4"] }
|
||||
async-opcua = { version = "0.18", features = ["client"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter", "time", "json"] }
|
||||
tracing-appender = "0.2"
|
||||
dotenv = "0.15"
|
||||
validator = { version = "0.20", features = ["derive"] }
|
||||
anyhow = "1.0"
|
||||
fs2 = "0.4"
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Add the feeder/distributor app manifest**
|
||||
|
||||
```toml
|
||||
[package]
|
||||
name = "app_feeder_distributor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
plc_platform_core = { path = "../plc_platform_core" }
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
tracing = "0.1"
|
||||
dotenv = "0.15"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
tray-icon = "0.15"
|
||||
winit = "0.30"
|
||||
webbrowser = "0.8"
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Add the operation-system app manifest**
|
||||
|
||||
```toml
|
||||
[package]
|
||||
name = "app_operation_system"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
plc_platform_core = { path = "../plc_platform_core" }
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
axum = { version = "0.8", features = ["ws"] }
|
||||
tower-http = { version = "0.6", features = ["cors", "fs"] }
|
||||
tracing = "0.1"
|
||||
dotenv = "0.15"
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Run metadata again to verify the workspace shape**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo metadata --no-deps
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- Output includes `plc_platform_core`, `app_feeder_distributor`, and `app_operation_system`
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```powershell
|
||||
git add Cargo.toml crates/plc_platform_core/Cargo.toml crates/app_feeder_distributor/Cargo.toml crates/app_operation_system/Cargo.toml
|
||||
git commit -m "build(workspace): add dual-app workspace manifests"
|
||||
```
|
||||
|
||||
## Task 2: Introduce The Shared Core Skeleton
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/plc_platform_core/src/lib.rs`
|
||||
- Create: `crates/plc_platform_core/src/platform_context.rs`
|
||||
- Create: `crates/plc_platform_core/src/bootstrap.rs`
|
||||
- Test: `crates/plc_platform_core/tests/bootstrap_smoke.rs`
|
||||
|
||||
- [ ] **Step 1: Write the failing shared-core smoke test**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::platform_context::PlatformContext;
|
||||
|
||||
#[test]
|
||||
fn platform_context_type_is_public() {
|
||||
fn assert_send_sync_clone<T: Send + Sync + Clone>() {}
|
||||
assert_send_sync_clone::<PlatformContext>();
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core platform_context_type_is_public -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with unresolved import or missing type errors
|
||||
|
||||
- [ ] **Step 3: Add the shared-core public module surface**
|
||||
|
||||
```rust
|
||||
pub mod bootstrap;
|
||||
pub mod platform_context;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Add the initial `PlatformContext` type**
|
||||
|
||||
```rust
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PlatformContext {
|
||||
pub config_name: Arc<str>,
|
||||
}
|
||||
|
||||
impl PlatformContext {
|
||||
pub fn new(config_name: impl Into<Arc<str>>) -> Self {
|
||||
Self {
|
||||
config_name: config_name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Add a minimal bootstrap module**
|
||||
|
||||
```rust
|
||||
use crate::platform_context::PlatformContext;
|
||||
|
||||
pub fn bootstrap_platform_for_tests() -> PlatformContext {
|
||||
PlatformContext::new("test")
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Run the shared-core test to verify it passes**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core platform_context_type_is_public -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/plc_platform_core/src/lib.rs crates/plc_platform_core/src/platform_context.rs crates/plc_platform_core/src/bootstrap.rs crates/plc_platform_core/tests/bootstrap_smoke.rs
|
||||
git commit -m "feat(core): add shared platform skeleton"
|
||||
```
|
||||
|
||||
## Task 3: Move Stable Utility And Model Modules Into The Shared Core
|
||||
|
||||
**Files:**
|
||||
- Create or move: `crates/plc_platform_core/src/model.rs`
|
||||
- Create or move: `crates/plc_platform_core/src/util.rs`
|
||||
- Create or move: `crates/plc_platform_core/src/util/*.rs`
|
||||
- Modify: `crates/plc_platform_core/src/lib.rs`
|
||||
- Modify: imports in the current business code that referenced `crate::model` or `crate::util`
|
||||
|
||||
- [ ] **Step 1: Write a failing model import test**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::model::Equipment;
|
||||
|
||||
#[test]
|
||||
fn equipment_model_is_exposed_from_shared_core() {
|
||||
let _ = std::mem::size_of::<Equipment>();
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core equipment_model_is_exposed_from_shared_core -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with missing `model` module or `Equipment` type
|
||||
|
||||
- [ ] **Step 3: Move `src/model.rs` and `src/util*` into `plc_platform_core` and export them**
|
||||
|
||||
```rust
|
||||
pub mod bootstrap;
|
||||
pub mod model;
|
||||
pub mod platform_context;
|
||||
pub mod util;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Update downstream imports to the shared path**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::model::Equipment;
|
||||
use plc_platform_core::util::response::ApiResponse;
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Run focused tests and a type check**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core equipment_model_is_exposed_from_shared_core -- --exact
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS for the test
|
||||
- `cargo check` succeeds or reports only the next modules that still need moving
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/plc_platform_core/src/lib.rs crates/plc_platform_core/src/model.rs crates/plc_platform_core/src/util.rs crates/plc_platform_core/src/util app_feeder_distributor
|
||||
git commit -m "refactor(core): move model and util modules into shared crate"
|
||||
```
|
||||
|
||||
## Task 4: Move Database, Service, Telemetry, And Connection Modules Into The Shared Core
|
||||
|
||||
**Files:**
|
||||
- Move: `src/db.rs`
|
||||
- Move: `src/service.rs`
|
||||
- Move: `src/service/*.rs`
|
||||
- Move: `src/telemetry.rs`
|
||||
- Move: `src/connection.rs`
|
||||
- Modify: `crates/plc_platform_core/src/lib.rs`
|
||||
- Modify: downstream imports in both apps
|
||||
|
||||
- [ ] **Step 1: Write a failing service API exposure test**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::service::EquipmentRolePoint;
|
||||
|
||||
#[test]
|
||||
fn service_types_are_public_from_shared_core() {
|
||||
let _ = std::mem::size_of::<EquipmentRolePoint>();
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core service_types_are_public_from_shared_core -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with unresolved module or type
|
||||
|
||||
- [ ] **Step 3: Move and export the stable platform modules**
|
||||
|
||||
```rust
|
||||
pub mod bootstrap;
|
||||
pub mod connection;
|
||||
pub mod db;
|
||||
pub mod model;
|
||||
pub mod platform_context;
|
||||
pub mod service;
|
||||
pub mod telemetry;
|
||||
pub mod util;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Update feeder imports to use the shared crate**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::connection::ConnectionManager;
|
||||
use plc_platform_core::service::get_all_enabled_sources;
|
||||
use plc_platform_core::telemetry::PointMonitorInfo;
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Run focused verification**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core service_types_are_public_from_shared_core -- --exact
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS for the test
|
||||
- Feeder app type-checks after import updates
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/plc_platform_core/src/db.rs crates/plc_platform_core/src/service.rs crates/plc_platform_core/src/service crates/plc_platform_core/src/telemetry.rs crates/plc_platform_core/src/connection.rs crates/plc_platform_core/src/lib.rs crates/app_feeder_distributor
|
||||
git commit -m "refactor(core): move platform data and connection modules"
|
||||
```
|
||||
|
||||
## Task 5: Split The Event Layer Into Platform Infrastructure Plus Namespaced Business Events
|
||||
|
||||
**Files:**
|
||||
- Move and modify: `src/event.rs`
|
||||
- Create: `crates/plc_platform_core/tests/event_namespace.rs`
|
||||
- Modify: feeder business event call sites
|
||||
|
||||
- [ ] **Step 1: Write a failing event namespace test**
|
||||
|
||||
```rust
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn namespaced_event_types_keep_their_prefix() {
|
||||
let event_type = "feeder.auto_control_started";
|
||||
let payload = json!({"unit_id": "00000000-0000-0000-0000-000000000000"});
|
||||
assert!(event_type.starts_with("feeder."));
|
||||
assert_eq!(payload["unit_id"], "00000000-0000-0000-0000-000000000000");
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Add a shared platform event record type and publisher surface**
|
||||
|
||||
```rust
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
pub struct EventEnvelope {
|
||||
pub event_type: String,
|
||||
pub payload: serde_json::Value,
|
||||
}
|
||||
|
||||
impl EventEnvelope {
|
||||
pub fn new(event_type: impl Into<String>, payload: serde_json::Value) -> Self {
|
||||
Self {
|
||||
event_type: event_type.into(),
|
||||
payload,
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Convert feeder-specific events into explicit envelopes at the call site**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::event::EventEnvelope;
|
||||
use serde_json::json;
|
||||
|
||||
let event = EventEnvelope::new(
|
||||
"feeder.auto_control_started",
|
||||
json!({ "unit_id": unit_id }),
|
||||
);
|
||||
state.platform.event_manager.publish(event)?;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Run the shared-core tests and feeder checks**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core namespaced_event_types_keep_their_prefix -- --exact
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- Feeder app compiles with event namespace updates
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/plc_platform_core/src/event.rs crates/plc_platform_core/tests/event_namespace.rs crates/app_feeder_distributor
|
||||
git commit -m "refactor(events): add shared event envelopes with namespaces"
|
||||
```
|
||||
|
||||
## Task 6: Move WebSocket, Runtime, And Command Infrastructure Into The Shared Core
|
||||
|
||||
**Files:**
|
||||
- Move: `src/websocket.rs`
|
||||
- Move: `src/control/runtime.rs`
|
||||
- Move: `src/control/command.rs`
|
||||
- Modify: `crates/plc_platform_core/src/lib.rs`
|
||||
- Modify: feeder engine imports
|
||||
|
||||
- [ ] **Step 1: Write a failing runtime exposure test**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::control::runtime::UnitRuntimeState;
|
||||
|
||||
#[test]
|
||||
fn runtime_state_is_exposed_from_shared_core() {
|
||||
assert_eq!(serde_json::to_string(&UnitRuntimeState::Stopped).unwrap(), "\"stopped\"");
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core runtime_state_is_exposed_from_shared_core -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with unresolved module path
|
||||
|
||||
- [ ] **Step 3: Add the shared `control` module surface**
|
||||
|
||||
```rust
|
||||
pub mod command;
|
||||
pub mod runtime;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Export `control` from the shared crate root**
|
||||
|
||||
```rust
|
||||
pub mod bootstrap;
|
||||
pub mod connection;
|
||||
pub mod control;
|
||||
pub mod db;
|
||||
pub mod event;
|
||||
pub mod model;
|
||||
pub mod platform_context;
|
||||
pub mod service;
|
||||
pub mod telemetry;
|
||||
pub mod util;
|
||||
pub mod websocket;
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Update feeder imports**
|
||||
|
||||
```rust
|
||||
use plc_platform_core::control::command::send_pulse_command;
|
||||
use plc_platform_core::control::runtime::{ControlRuntimeStore, UnitRuntime, UnitRuntimeState};
|
||||
use plc_platform_core::websocket::WsMessage;
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Run verification**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core runtime_state_is_exposed_from_shared_core -- --exact
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- Feeder app compiles with shared runtime and command imports
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/plc_platform_core/src/control crates/plc_platform_core/src/websocket.rs crates/plc_platform_core/src/lib.rs crates/app_feeder_distributor
|
||||
git commit -m "refactor(core): move websocket runtime and command infrastructure"
|
||||
```
|
||||
|
||||
## Task 7: Build The Feeder/Distributor App Crate Around The Current Business Logic
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/app_feeder_distributor/src/main.rs`
|
||||
- Create: `crates/app_feeder_distributor/src/app.rs`
|
||||
- Create: `crates/app_feeder_distributor/src/router.rs`
|
||||
- Move or copy business modules: feeder `handler`, feeder `control`, feeder `web`
|
||||
- Test: `crates/app_feeder_distributor/tests/router_smoke.rs`
|
||||
|
||||
- [ ] **Step 1: Write the failing feeder router smoke test**
|
||||
|
||||
```rust
|
||||
use axum::Router;
|
||||
|
||||
#[test]
|
||||
fn feeder_router_builds() {
|
||||
fn assert_router(_: Router) {}
|
||||
assert_router(app_feeder_distributor::router::build_router_for_tests());
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p app_feeder_distributor feeder_router_builds -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with missing library module or router function
|
||||
|
||||
- [ ] **Step 3: Add the feeder app state and router shell**
|
||||
|
||||
```rust
|
||||
use std::sync::Arc;
|
||||
use plc_platform_core::platform_context::PlatformContext;
|
||||
use plc_platform_core::control::runtime::ControlRuntimeStore;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FeederAppState {
|
||||
pub platform: Arc<PlatformContext>,
|
||||
pub runtime: Arc<ControlRuntimeStore>,
|
||||
}
|
||||
```
|
||||
|
||||
```rust
|
||||
use axum::Router;
|
||||
|
||||
pub fn build_router_for_tests() -> Router {
|
||||
Router::new()
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Move the current feeder/distributor business modules into the new crate and wire them to `FeederAppState`**
|
||||
|
||||
```rust
|
||||
pub mod control;
|
||||
pub mod handler;
|
||||
pub mod router;
|
||||
```
|
||||
|
||||
```rust
|
||||
use crate::app::FeederAppState;
|
||||
use axum::Router;
|
||||
|
||||
pub fn build_router(state: FeederAppState) -> Router {
|
||||
Router::new().with_state(state)
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Add the feeder binary entrypoint**
|
||||
|
||||
```rust
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
dotenv::dotenv().ok();
|
||||
let _ = app_feeder_distributor::app::run().await;
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Run the feeder test and type-check**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p app_feeder_distributor feeder_router_builds -- --exact
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- The feeder app builds as a standalone binary crate
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/app_feeder_distributor
|
||||
git commit -m "feat(feeder): create dedicated feeder distributor app crate"
|
||||
```
|
||||
|
||||
## Task 8: Add The Operation-System App Skeleton With Its Own State, Router, And Web Root
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/app_operation_system/src/main.rs`
|
||||
- Create: `crates/app_operation_system/src/app.rs`
|
||||
- Create: `crates/app_operation_system/src/router.rs`
|
||||
- Create: `crates/app_operation_system/web/index.html`
|
||||
- Test: `crates/app_operation_system/tests/router_smoke.rs`
|
||||
|
||||
- [ ] **Step 1: Write the failing operation-system router smoke test**
|
||||
|
||||
```rust
|
||||
use axum::Router;
|
||||
|
||||
#[test]
|
||||
fn operation_router_builds() {
|
||||
fn assert_router(_: Router) {}
|
||||
assert_router(app_operation_system::router::build_router_for_tests());
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run the test to verify it fails**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p app_operation_system operation_router_builds -- --exact
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL with missing module or function
|
||||
|
||||
- [ ] **Step 3: Add the operation-system app state and router shell**
|
||||
|
||||
```rust
|
||||
use std::sync::Arc;
|
||||
use plc_platform_core::platform_context::PlatformContext;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OperationAppState {
|
||||
pub platform: Arc<PlatformContext>,
|
||||
}
|
||||
```
|
||||
|
||||
```rust
|
||||
use axum::Router;
|
||||
|
||||
pub fn build_router_for_tests() -> Router {
|
||||
Router::new()
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Add a distinct operation-system binary**
|
||||
|
||||
```rust
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
dotenv::dotenv().ok();
|
||||
let _ = app_operation_system::app::run().await;
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Add a dedicated web placeholder**
|
||||
|
||||
```html
|
||||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>运转系统专用版</title>
|
||||
</head>
|
||||
<body>
|
||||
<main>Operation system app scaffold</main>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Run the operation-system test and type-check**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p app_operation_system operation_router_builds -- --exact
|
||||
cargo check -p app_operation_system
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- The operation-system app builds as a separate binary crate
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```powershell
|
||||
git add crates/app_operation_system
|
||||
git commit -m "feat(ops): add operation-system app skeleton"
|
||||
```
|
||||
|
||||
## Task 9: Remove The Old Root Binary And Keep Compatibility Wrappers Only Where Needed
|
||||
|
||||
**Files:**
|
||||
- Modify or delete: root `src/main.rs`
|
||||
- Modify: any root module declarations that are now redundant
|
||||
- Optionally create: compatibility README notes for new build commands
|
||||
|
||||
- [ ] **Step 1: Write the failing package selection check**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo check
|
||||
```
|
||||
|
||||
Expected:
|
||||
- FAIL because the old root package layout no longer matches the workspace layout
|
||||
|
||||
- [ ] **Step 2: Remove the obsolete root binary packaging and leave only workspace members**
|
||||
|
||||
```text
|
||||
Delete the old root package entrypoint after both app crates compile.
|
||||
Do not leave a third unnamed binary package at repository root.
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Add explicit build instructions to the README**
|
||||
|
||||
````markdown
## Build

```powershell
cargo build -p app_feeder_distributor --release
cargo build -p app_operation_system --release
```
````
|
||||
|
||||
- [ ] **Step 4: Run workspace verification**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo check --workspace
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- Both app crates and the shared core compile from the workspace root
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```powershell
|
||||
git add README.md
|
||||
git rm src/main.rs
|
||||
git commit -m "build(workspace): remove obsolete root binary entrypoint"
|
||||
```
|
||||
|
||||
## Task 10: Verify Namespaced Event Storage And Both App Builds
|
||||
|
||||
**Files:**
|
||||
- Test: `crates/plc_platform_core/tests/event_namespace.rs`
|
||||
- Test: `crates/app_feeder_distributor/tests/router_smoke.rs`
|
||||
- Test: `crates/app_operation_system/tests/router_smoke.rs`
|
||||
- Modify: docs if build commands changed
|
||||
|
||||
- [ ] **Step 1: Add a final event naming regression test**
|
||||
|
||||
```rust
|
||||
#[test]
|
||||
fn event_namespaces_match_the_supported_apps() {
|
||||
let supported = ["platform.source_connected", "feeder.auto_control_started", "ops.unit_started"];
|
||||
for name in supported {
|
||||
assert!(name.contains('.'));
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run all focused tests**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo test -p plc_platform_core
|
||||
cargo test -p app_feeder_distributor
|
||||
cargo test -p app_operation_system
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS for all three packages
|
||||
|
||||
- [ ] **Step 3: Run final workspace builds**
|
||||
|
||||
Run:
|
||||
|
||||
```powershell
|
||||
cargo build -p app_feeder_distributor --release
|
||||
cargo build -p app_operation_system --release
|
||||
```
|
||||
|
||||
Expected:
|
||||
- PASS
|
||||
- Two release binaries are produced successfully
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```powershell
|
||||
git add docs/superpowers/plans/2026-04-14-dual-app-shared-core-implementation.md crates README.md
|
||||
git commit -m "test(workspace): verify dual-app shared-core builds"
|
||||
```
|
||||
|
||||
## Self-Review
|
||||
|
||||
### Spec coverage
|
||||
|
||||
- Workspace + shared core + two apps: covered by Tasks 1, 2, 7, and 8
|
||||
- Shared module extraction: covered by Tasks 3, 4, and 6
|
||||
- Namespaced events in one table: covered by Tasks 5 and 10
|
||||
- Two exe outputs: covered by Tasks 9 and 10
|
||||
- Future single-app expansion path: preserved by the app-crate composition approach in Tasks 7 and 8
|
||||
|
||||
### Placeholder scan
|
||||
|
||||
- No placeholder markers remain in the task instructions
|
||||
- Each code-changing step includes concrete code or concrete commands
|
||||
|
||||
### Type consistency
|
||||
|
||||
- Shared state type is `PlatformContext`
|
||||
- Feeder business state is `FeederAppState`
|
||||
- Operation business state is `OperationAppState`
|
||||
- Shared event wrapper is `EventEnvelope`
|
||||
|
|
---
# Three-Panel Web Restructure And Handler Migration
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Restructure each app's UI into three logical panels (platform-config / app-config / ops), move stateless handlers (log, doc) to the shared core, and split API.md per application.
|
||||
|
||||
**Architecture:** Each app gets three tabs: "运维" (ops view), "应用配置" (app-specific config), "平台配置" (shared platform config). Core HTML pages are cleaned of unit references. Feeder overrides core pages that need unit-related UI via the ServeDir fallback chain. Stateless handlers (log, doc) move to `plc_platform_core::handler` so both apps can register them. Each app serves its own API doc. Handlers that depend on `AppState` (source, point, equipment, tag, page) stay in the feeder app — they require the `PlatformContext` refactor to move, which is deferred to a follow-up plan.
|
||||
|
||||
**Tech Stack:** Rust (Axum, tower-http), HTML/CSS/JS (vanilla ES modules), Cargo workspace
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
### Core HTML changes (remove unit references)
|
||||
|
||||
- Modify: `web/core/html/source-panel.html` — remove unit list section, keep data source only
|
||||
- Modify: `web/core/html/equipment-panel.html` — remove batch-unit toolbar
|
||||
- Modify: `web/core/html/modals.html` — remove unit select from equipment modal
|
||||
|
||||
### Feeder HTML overrides (add unit references back)
|
||||
|
||||
- Create: `web/feeder/html/source-panel.html` — override with units + sources stacked
|
||||
- Create: `web/feeder/html/equipment-panel.html` — override with unit batch toolbar
|
||||
- Create: `web/feeder/html/modals.html` — override with unit select in equipment modal
|
||||
- Create: `web/feeder/html/unit-panel.html` — standalone unit config panel for app-config view
|
||||
|
||||
### Feeder three-tab UI
|
||||
|
||||
- Modify: `web/feeder/html/topbar.html` — add third tab
|
||||
- Modify: `web/feeder/index.html` — add unit-panel partial, add grid-app-config layout
|
||||
- Modify: `web/feeder/js/app.js` — three-way view switching
|
||||
- Modify: `web/feeder/js/dom.js` — add tabAppConfig selector
|
||||
- Modify: `web/core/styles.css` — add grid-app-config layout
|
||||
|
||||
### Handler migration to core
|
||||
|
||||
- Create: `crates/plc_platform_core/src/handler.rs` — module declarations
|
||||
- Create: `crates/plc_platform_core/src/handler/log.rs` — moved from feeder
|
||||
- Create: `crates/plc_platform_core/src/handler/doc.rs` — generic markdown serving
|
||||
- Modify: `crates/plc_platform_core/src/lib.rs` — export handler module
|
||||
- Modify: `crates/plc_platform_core/Cargo.toml` — add async-stream if needed
|
||||
- Modify: `crates/app_feeder_distributor/src/handler.rs` — replace log/doc with re-exports
|
||||
- Modify: `crates/app_feeder_distributor/src/handler/doc.rs` — thin wrapper calling core
|
||||
- Delete: `crates/app_feeder_distributor/src/handler/log.rs` — replaced by core
|
||||
|
||||
### Ops app handler registration
|
||||
|
||||
- Modify: `crates/app_operation_system/src/lib.rs` — add handler module
|
||||
- Create: `crates/app_operation_system/src/handler.rs` — module declarations
|
||||
- Create: `crates/app_operation_system/src/handler/doc.rs` — ops-specific doc handler
|
||||
- Modify: `crates/app_operation_system/src/router.rs` — register log/doc routes
|
||||
- Modify: `crates/app_operation_system/Cargo.toml` — add needed deps
|
||||
|
||||
### API.md split
|
||||
|
||||
- Rename: `API.md` → `docs/api-feeder.md`
|
||||
- Create: `docs/api-ops.md` — ops API (health endpoint for now)
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Clean Core HTML — Remove Unit References
|
||||
|
||||
**Files:**
|
||||
- Modify: `web/core/html/source-panel.html`
|
||||
- Modify: `web/core/html/equipment-panel.html`
|
||||
- Modify: `web/core/html/modals.html`
|
||||
|
||||
- [ ] **Step 1: Rewrite core source-panel to data sources only**
|
||||
|
||||
Replace `web/core/html/source-panel.html` with:
|
||||
|
||||
```html
|
||||
<section class="panel bottom-left">
|
||||
<div class="panel-head">
|
||||
<h2>数据源</h2>
|
||||
<button type="button" id="openSourceForm">+ 新增</button>
|
||||
</div>
|
||||
<div class="source-panels" id="sourceList"></div>
|
||||
</section>
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Remove batch-unit toolbar from core equipment-panel**
|
||||
|
||||
Replace `web/core/html/equipment-panel.html` with:
|
||||
|
||||
```html
|
||||
<section class="panel top-left">
|
||||
<div class="panel-head">
|
||||
<h2>设备</h2>
|
||||
<button type="button" id="newEquipmentBtn">+ 新增</button>
|
||||
</div>
|
||||
<div class="toolbar equipment-toolbar">
|
||||
<input id="equipmentKeyword" placeholder="搜索编码或名称" />
|
||||
<button type="button" class="secondary" id="refreshEquipmentBtn">刷新</button>
|
||||
</div>
|
||||
<div class="list equipment-list" id="equipmentList"></div>
|
||||
</section>
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Remove unit select from core equipment modal**
|
||||
|
||||
In `web/core/html/modals.html`, remove the unit select label from the equipment modal. Replace the equipment modal section (lines 1-35) with:
|
||||
|
||||
```html
|
||||
<div class="modal hidden" id="equipmentModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>设备配置</h3>
|
||||
<button class="secondary" id="closeEquipmentModal">X</button>
|
||||
</div>
|
||||
<form id="equipmentForm" class="form">
|
||||
<input type="hidden" id="equipmentId" />
|
||||
<label>
|
||||
编码
|
||||
<input id="equipmentCode" required />
|
||||
</label>
|
||||
<label>
|
||||
名称
|
||||
<input id="equipmentName" required />
|
||||
</label>
|
||||
<label>
|
||||
类型
|
||||
<select id="equipmentKind"></select>
|
||||
</label>
|
||||
<label>
|
||||
说明
|
||||
<input id="equipmentDescription" />
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="equipmentReset">清空</button>
|
||||
<button type="submit" id="equipmentSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
```
|
||||
|
||||
Keep the rest of modals.html (point, source, binding modals) unchanged.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add web/core/html/source-panel.html web/core/html/equipment-panel.html web/core/html/modals.html
|
||||
git commit -m "refactor(web): remove unit references from core HTML pages"
|
||||
```
|
||||
|
||||
## Task 2: Create Feeder Override Pages With Unit References
|
||||
|
||||
**Files:**
|
||||
- Create: `web/feeder/html/source-panel.html`
|
||||
- Create: `web/feeder/html/equipment-panel.html`
|
||||
- Create: `web/feeder/html/modals.html`
|
||||
|
||||
- [ ] **Step 1: Create feeder source-panel override (units + sources stacked)**
|
||||
|
||||
`web/feeder/html/source-panel.html` — same as the original file with both sections:
|
||||
|
||||
```html
|
||||
<section class="panel bottom-left">
|
||||
<div class="stack-panel">
|
||||
<div class="stack-section">
|
||||
<div class="panel-head">
|
||||
<h2>控制单元</h2>
|
||||
<div class="toolbar">
|
||||
<button type="button" class="secondary" id="refreshUnitBtn">刷新</button>
|
||||
<button type="button" id="newUnitBtn">+ 新增</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="list unit-list" id="unitList"></div>
|
||||
</div>
|
||||
|
||||
<div class="stack-section stack-section-bordered">
|
||||
<div class="panel-head">
|
||||
<h2>数据源</h2>
|
||||
<button type="button" id="openSourceForm">+ 新增</button>
|
||||
</div>
|
||||
<div class="source-panels" id="sourceList"></div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Create feeder equipment-panel override (with unit batch toolbar)**
|
||||
|
||||
`web/feeder/html/equipment-panel.html` — same as original with batch-unit toolbar:
|
||||
|
||||
```html
|
||||
<section class="panel top-left">
|
||||
<div class="panel-head">
|
||||
<h2>设备</h2>
|
||||
<button type="button" id="newEquipmentBtn">+ 新增</button>
|
||||
</div>
|
||||
<div class="toolbar equipment-toolbar">
|
||||
<input id="equipmentKeyword" placeholder="搜索编码或名称" />
|
||||
<button type="button" class="secondary" id="refreshEquipmentBtn">刷新</button>
|
||||
</div>
|
||||
<div class="toolbar equipment-batch-toolbar">
|
||||
<div class="muted" id="selectedEquipmentSummary">已选 0 台设备</div>
|
||||
<select id="equipmentBatchUnitId"></select>
|
||||
<button type="button" class="secondary" id="clearEquipmentSelectionBtn">清空选择</button>
|
||||
<button type="button" id="applyEquipmentUnitBtn">批量设单元</button>
|
||||
</div>
|
||||
<div class="list equipment-list" id="equipmentList"></div>
|
||||
</section>
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Create feeder modals override (with unit select in equipment modal)**
|
||||
|
||||
`web/feeder/html/modals.html` — copy of the ORIGINAL core modals.html (before Task 1 removes the unit select). This file overrides the core version via the ServeDir fallback chain. The equipment modal retains the unit select field that Task 1 removes from core.
|
||||
|
||||
Write the full file with this content (identical to the original core modals.html):
|
||||
|
||||
```html
|
||||
<div class="modal hidden" id="equipmentModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>设备配置</h3>
|
||||
<button class="secondary" id="closeEquipmentModal">X</button>
|
||||
</div>
|
||||
<form id="equipmentForm" class="form">
|
||||
<input type="hidden" id="equipmentId" />
|
||||
<label>
|
||||
所属单元
|
||||
<select id="equipmentUnitId"></select>
|
||||
</label>
|
||||
<label>
|
||||
编码
|
||||
<input id="equipmentCode" required />
|
||||
</label>
|
||||
<label>
|
||||
名称
|
||||
<input id="equipmentName" required />
|
||||
</label>
|
||||
<label>
|
||||
类型
|
||||
<select id="equipmentKind"></select>
|
||||
</label>
|
||||
<label>
|
||||
说明
|
||||
<input id="equipmentDescription" />
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="equipmentReset">清空</button>
|
||||
<button type="submit" id="equipmentSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointModal">
|
||||
<div class="modal-content">
|
||||
<div class="modal-head">
|
||||
<h3>选择节点创建点位</h3>
|
||||
<button class="secondary" id="closeModal">X</button>
|
||||
</div>
|
||||
<div class="toolbar">
|
||||
<select id="pointSourceSelect"></select>
|
||||
<div class="muted" id="pointSourceNodeCount">Nodes: 0</div>
|
||||
<button id="browseNodes">加载节点</button>
|
||||
<button class="secondary" id="refreshTree">刷新树</button>
|
||||
</div>
|
||||
<div class="tree" id="nodeTree"></div>
|
||||
<div class="modal-foot">
|
||||
<div class="muted" id="selectedCount">已选中 0 个节点</div>
|
||||
<button id="createPoints">创建设备点位</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="sourceModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>Source 配置</h3>
|
||||
<button class="secondary" id="closeSourceModal">X</button>
|
||||
</div>
|
||||
<form id="sourceForm" class="form">
|
||||
<input type="hidden" id="sourceId" />
|
||||
<label>
|
||||
名称
|
||||
<input id="sourceName" required />
|
||||
</label>
|
||||
<label>
|
||||
Endpoint
|
||||
<input id="sourceEndpoint" placeholder="opc.tcp://host:port" required />
|
||||
</label>
|
||||
<label class="check-row">
|
||||
<input type="checkbox" id="sourceEnabled" checked />
|
||||
<span>启用</span>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="sourceReset">清空</button>
|
||||
<button type="submit" id="sourceSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointBindingModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>绑定点位</h3>
|
||||
<button class="secondary" id="closePointBindingModal">X</button>
|
||||
</div>
|
||||
<form id="pointBindingForm" class="form">
|
||||
<input type="hidden" id="bindingPointId" />
|
||||
<label>
|
||||
点位
|
||||
<input id="bindingPointName" disabled />
|
||||
</label>
|
||||
<label>
|
||||
设备
|
||||
<select id="bindingEquipmentId"></select>
|
||||
</label>
|
||||
<label>
|
||||
角色模板
|
||||
<select id="bindingSignalRole"></select>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="clearPointBinding">清空绑定</button>
|
||||
<button type="submit" id="savePointBinding">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="batchBindingModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>批量绑定点位</h3>
|
||||
<button class="secondary" id="closeBatchBindingModal">X</button>
|
||||
</div>
|
||||
<form id="batchBindingForm" class="form">
|
||||
<div class="muted" id="batchBindingSummary">已选中 0 个点位</div>
|
||||
<label>
|
||||
设备
|
||||
<select id="batchBindingEquipmentId"></select>
|
||||
</label>
|
||||
<label>
|
||||
角色模板
|
||||
<select id="batchBindingSignalRole"></select>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="clearBatchBinding">清空设备和角色</button>
|
||||
<button type="submit" id="saveBatchBinding">批量保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add web/feeder/html/source-panel.html web/feeder/html/equipment-panel.html web/feeder/html/modals.html
|
||||
git commit -m "refactor(web): create feeder overrides for unit-dependent pages"
|
||||
```
|
||||
|
||||
## Task 3: Create Feeder Unit Panel And Add Three-Tab UI
|
||||
|
||||
**Files:**
|
||||
- Create: `web/feeder/html/unit-panel.html`
|
||||
- Modify: `web/feeder/html/topbar.html`
|
||||
- Modify: `web/feeder/index.html`
|
||||
- Modify: `web/feeder/js/dom.js`
|
||||
- Modify: `web/feeder/js/app.js`
|
||||
- Modify: `web/core/styles.css`
|
||||
|
||||
- [ ] **Step 1: Create standalone unit-panel for app-config view**
|
||||
|
||||
`web/feeder/html/unit-panel.html`:
|
||||
|
||||
```html
|
||||
<section class="panel app-config-main">
|
||||
<div class="panel-head">
|
||||
<h2>控制单元配置</h2>
|
||||
<div class="toolbar">
|
||||
<button type="button" class="secondary" id="refreshUnitBtn2">刷新</button>
|
||||
<button type="button" id="newUnitBtn2">+ 新增</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="list unit-config-list" id="unitConfigList"></div>
|
||||
</section>
|
||||
```
|
||||
|
||||
Note: Uses separate IDs (`refreshUnitBtn2`, `newUnitBtn2`, `unitConfigList`) to avoid DOM ID conflicts with the unit list in the source-panel override. The JS wiring will connect these to the same handler functions.
|
||||
|
||||
- [ ] **Step 2: Update topbar for three tabs**
|
||||
|
||||
Replace `web/feeder/html/topbar.html`:
|
||||
|
||||
```html
|
||||
<header class="topbar">
|
||||
<div class="title">投煤器布料机控制系统</div>
|
||||
<div class="tab-bar">
|
||||
<button type="button" class="tab-btn active" id="tabOps">运维</button>
|
||||
<button type="button" class="tab-btn" id="tabAppConfig">应用配置</button>
|
||||
<button type="button" class="tab-btn" id="tabConfig">平台配置</button>
|
||||
</div>
|
||||
<div class="topbar-actions">
|
||||
<button type="button" class="secondary" id="openReadmeDoc">README.md</button>
|
||||
<button type="button" class="secondary" id="openApiDoc">API.md</button>
|
||||
<div class="status" id="statusText">
|
||||
<span class="ws-dot" id="wsDot"></span>
|
||||
<span id="wsLabel">连接中…</span>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Replace feeder index.html `<main>` contents to include unit-panel partial**
|
||||
|
||||
Replace the entire `<main>` block in `web/feeder/index.html`:
|
||||
|
||||
```html
|
||||
<main class="grid-ops">
|
||||
<div data-partial="/ui/html/ops-panel.html"></div>
|
||||
<div data-partial="/ui/html/equipment-panel.html"></div>
|
||||
<div data-partial="/ui/html/points-panel.html"></div>
|
||||
<div data-partial="/ui/html/source-panel.html"></div>
|
||||
<div data-partial="/ui/html/log-stream-panel.html"></div>
|
||||
<div data-partial="/ui/html/chart-panel.html"></div>
|
||||
<div data-partial="/ui/html/unit-panel.html"></div>
|
||||
<div data-partial="/ui/html/logs-panel.html"></div>
|
||||
</main>
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Add grid-app-config layout to styles.css**
|
||||
|
||||
Add after the existing `.grid-ops` rule in `web/core/styles.css`:
|
||||
|
||||
```css
|
||||
.grid-app-config {
|
||||
display: grid;
|
||||
gap: 1px;
|
||||
height: calc(100vh - var(--topbar-h));
|
||||
grid-template-columns: 1fr;
|
||||
grid-template-rows: 1fr;
|
||||
}
|
||||
|
||||
.grid-app-config .panel.app-config-main { grid-column: 1; grid-row: 1; }
|
||||
```
|
||||
|
||||
Also add the responsive override inside the existing `@media (max-width: 900px)` block:
|
||||
|
||||
```css
|
||||
.grid-app-config {
|
||||
grid-template-columns: 1fr;
|
||||
grid-template-rows: auto;
|
||||
height: auto;
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Add tabAppConfig to dom.js**
|
||||
|
||||
In `web/feeder/js/dom.js`, add:
|
||||
|
||||
```javascript
|
||||
tabAppConfig: byId("tabAppConfig"),
|
||||
refreshUnitBtn2: byId("refreshUnitBtn2"),
|
||||
newUnitBtn2: byId("newUnitBtn2"),
|
||||
unitConfigList: byId("unitConfigList"),
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Update app.js for three-way view switching**
|
||||
|
||||
Replace the `switchView` function in `web/feeder/js/app.js`:
|
||||
|
||||
```javascript
|
||||
function switchView(view) {
|
||||
state.activeView = view;
|
||||
const main = document.querySelector("main");
|
||||
main.className =
|
||||
view === "ops" ? "grid-ops" :
|
||||
view === "app-config" ? "grid-app-config" :
|
||||
"grid-config";
|
||||
|
||||
dom.tabOps.classList.toggle("active", view === "ops");
|
||||
dom.tabAppConfig.classList.toggle("active", view === "app-config");
|
||||
dom.tabConfig.classList.toggle("active", view === "config");
|
||||
|
||||
// config-only panels (platform config view)
|
||||
["top-left", "top-right", "bottom-left", "bottom-right"].forEach((cls) => {
|
||||
const el = main.querySelector(`.panel.${cls}`);
|
||||
if (el) el.classList.toggle("hidden", view !== "config");
|
||||
});
|
||||
const logStreamPanel = main.querySelector(".panel.bottom-mid");
|
||||
if (logStreamPanel) logStreamPanel.classList.toggle("hidden", view !== "config");
|
||||
|
||||
// ops-only panels
|
||||
const opsMain = main.querySelector(".panel.ops-main");
|
||||
const opsBottom = main.querySelector(".panel.ops-bottom");
|
||||
if (opsMain) opsMain.classList.toggle("hidden", view !== "ops");
|
||||
if (opsBottom) opsBottom.classList.toggle("hidden", view !== "ops");
|
||||
|
||||
// app-config-only panels
|
||||
const appConfigMain = main.querySelector(".panel.app-config-main");
|
||||
if (appConfigMain) appConfigMain.classList.toggle("hidden", view !== "app-config");
|
||||
|
||||
if (view === "config") {
|
||||
startLogs();
|
||||
if (!_configLoaded) {
|
||||
_configLoaded = true;
|
||||
withStatus((async () => {
|
||||
await Promise.all([loadSources(), loadEquipments(), loadEvents()]);
|
||||
await loadPoints();
|
||||
})());
|
||||
}
|
||||
} else {
|
||||
stopLogs();
|
||||
}
|
||||
|
||||
if (view === "app-config") {
|
||||
if (!_appConfigLoaded) {
|
||||
_appConfigLoaded = true;
|
||||
withStatus(loadUnits());
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Add `let _appConfigLoaded = false;` alongside the existing `let _configLoaded = false;`.
|
||||
|
||||
Update `bindEvents` to add:
|
||||
|
||||
```javascript
|
||||
dom.tabAppConfig.addEventListener("click", () => switchView("app-config"));
|
||||
dom.refreshUnitBtn2.addEventListener("click", () => withStatus(loadUnits()));
|
||||
dom.newUnitBtn2.addEventListener("click", openCreateUnitModal);
|
||||
```
|
||||
|
||||
- [ ] **Step 7: Verify no broken partials**
|
||||
|
||||
Run feeder locally, check all three tabs load without console errors.
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected: PASS (no Rust changes in this task)
|
||||
|
||||
- [ ] **Step 8: Commit**
|
||||
|
||||
```bash
|
||||
git add web/feeder/html/unit-panel.html web/feeder/html/topbar.html web/feeder/index.html web/feeder/js/dom.js web/feeder/js/app.js web/core/styles.css
|
||||
git commit -m "feat(feeder): add three-tab UI (ops / app-config / platform-config)"
|
||||
```
|
||||
|
||||
## Task 4: Move Log Handler To Core
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/plc_platform_core/src/handler.rs`
|
||||
- Create: `crates/plc_platform_core/src/handler/log.rs`
|
||||
- Modify: `crates/plc_platform_core/src/lib.rs`
|
||||
- Modify: `crates/app_feeder_distributor/src/handler.rs`
|
||||
- Delete: `crates/app_feeder_distributor/src/handler/log.rs`
|
||||
|
||||
- [ ] **Step 1: Create core handler module**
|
||||
|
||||
`crates/plc_platform_core/src/handler.rs`:
|
||||
|
||||
```rust
|
||||
pub mod log;
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Move log.rs to core**
|
||||
|
||||
Copy `crates/app_feeder_distributor/src/handler/log.rs` to `crates/plc_platform_core/src/handler/log.rs`.
|
||||
|
||||
Change the import path from `plc_platform_core::util::response::ApiErr` to `crate::util::response::ApiErr`.
|
||||
|
||||
- [ ] **Step 3: Export handler module from core lib.rs**
|
||||
|
||||
Add `pub mod handler;` to `crates/plc_platform_core/src/lib.rs`.
|
||||
|
||||
- [ ] **Step 4: Update feeder handler.rs to re-export log from core**
|
||||
|
||||
In `crates/app_feeder_distributor/src/handler.rs`, replace:
|
||||
|
||||
```rust
|
||||
pub mod log;
|
||||
```
|
||||
|
||||
with:
|
||||
|
||||
```rust
|
||||
pub mod log {
|
||||
pub use plc_platform_core::handler::log::*;
|
||||
}
|
||||
```
|
||||
|
||||
Delete `crates/app_feeder_distributor/src/handler/log.rs`.
|
||||
|
||||
- [ ] **Step 5: Verify feeder compiles**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/plc_platform_core/src/handler.rs crates/plc_platform_core/src/handler/log.rs crates/plc_platform_core/src/lib.rs crates/app_feeder_distributor/src/handler.rs
|
||||
git rm crates/app_feeder_distributor/src/handler/log.rs
|
||||
git commit -m "refactor(core): move log handler to shared platform core"
|
||||
```
|
||||
|
||||
## Task 5: Move Doc Handler To Core And Split API.md
|
||||
|
||||
**Files:**
|
||||
- Create: `crates/plc_platform_core/src/handler/doc.rs`
|
||||
- Modify: `crates/plc_platform_core/src/handler.rs`
|
||||
- Modify: `crates/app_feeder_distributor/src/handler.rs`
|
||||
- Modify: `crates/app_feeder_distributor/src/handler/doc.rs`
|
||||
- Create: `crates/app_operation_system/src/handler.rs`
|
||||
- Create: `crates/app_operation_system/src/handler/doc.rs`
|
||||
- Rename: `API.md` → `docs/api-feeder.md`
|
||||
- Create: `docs/api-ops.md`
|
||||
|
||||
- [ ] **Step 1: Create core generic markdown serving utility**
|
||||
|
||||
`crates/plc_platform_core/src/handler/doc.rs`:
|
||||
|
||||
```rust
|
||||
use axum::{
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
use crate::util::response::ApiErr;
|
||||
|
||||
pub async fn serve_markdown(path: &str) -> Result<impl IntoResponse, ApiErr> {
|
||||
let content = tokio::fs::read_to_string(path)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
tracing::error!("Failed to read {}: {}", path, err);
|
||||
ApiErr::NotFound(format!("{} not found", path), None)
|
||||
})?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("text/markdown; charset=utf-8"),
|
||||
);
|
||||
|
||||
Ok((StatusCode::OK, headers, content))
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Add doc to core handler module**
|
||||
|
||||
In `crates/plc_platform_core/src/handler.rs`, add:
|
||||
|
||||
```rust
|
||||
pub mod doc;
|
||||
pub mod log;
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update feeder doc handler to use core utility**
|
||||
|
||||
Replace `crates/app_feeder_distributor/src/handler/doc.rs`:
|
||||
|
||||
```rust
|
||||
use axum::response::IntoResponse;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
|
||||
pub async fn get_api_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("docs/api-feeder.md").await
|
||||
}
|
||||
|
||||
pub async fn get_readme_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("README.md").await
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Rename API.md to docs/api-feeder.md**
|
||||
|
||||
```bash
|
||||
git mv API.md docs/api-feeder.md
|
||||
```
|
||||
|
||||
- [ ] **Step 5: Create ops API doc**
|
||||
|
||||
`docs/api-ops.md`:
|
||||
|
||||
```markdown
|
||||
# 运转系统 API
|
||||
|
||||
## 健康检查
|
||||
|
||||
- `GET /api/health` — 返回应用名称和状态
|
||||
|
||||
## 日志
|
||||
|
||||
- `GET /api/logs` — 拉取日志内容
|
||||
- `GET /api/logs/stream` — SSE 增量推送
|
||||
|
||||
## 文档
|
||||
|
||||
- `GET /api/docs/api-md` — 获取 API 文档
|
||||
- `GET /api/docs/readme-md` — 获取 README
|
||||
```
|
||||
|
||||
- [ ] **Step 6: Create ops handler module with doc handler**
|
||||
|
||||
`crates/app_operation_system/src/handler.rs`:
|
||||
|
||||
```rust
|
||||
pub mod doc;
|
||||
```
|
||||
|
||||
`crates/app_operation_system/src/handler/doc.rs`:
|
||||
|
||||
```rust
|
||||
use axum::response::IntoResponse;
|
||||
use plc_platform_core::util::response::ApiErr;
|
||||
|
||||
pub async fn get_api_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("docs/api-ops.md").await
|
||||
}
|
||||
|
||||
pub async fn get_readme_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
plc_platform_core::handler::doc::serve_markdown("README.md").await
|
||||
}
|
||||
```
|
||||
|
||||
Add `pub mod handler;` to `crates/app_operation_system/src/lib.rs`.
|
||||
|
||||
- [ ] **Step 7: Verify both apps compile**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_feeder_distributor
|
||||
cargo check -p app_operation_system
|
||||
```
|
||||
|
||||
Expected: both PASS
|
||||
|
||||
- [ ] **Step 8: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/plc_platform_core/src/handler/doc.rs crates/plc_platform_core/src/handler.rs crates/app_feeder_distributor/src/handler/doc.rs crates/app_operation_system/src/handler.rs crates/app_operation_system/src/handler/doc.rs crates/app_operation_system/src/lib.rs crates/app_operation_system/Cargo.toml docs/api-feeder.md docs/api-ops.md
|
||||
git commit -m "refactor(core): move doc handler to core and split API.md per app"
|
||||
```
|
||||
|
||||
## Task 6: Register Core Handlers In Ops App Router
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/app_operation_system/src/router.rs`
|
||||
|
||||
- [ ] **Step 1: Add log and doc routes to ops router**
|
||||
|
||||
Update `crates/app_operation_system/src/router.rs`:
|
||||
|
||||
```rust
|
||||
use axum::{extract::State, routing::get, Router};
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
use crate::app::AppState;
|
||||
|
||||
async fn no_cache(
|
||||
req: axum::extract::Request,
|
||||
next: axum::middleware::Next,
|
||||
) -> axum::response::Response {
|
||||
let mut response = next.run(req).await;
|
||||
response.headers_mut().insert(
|
||||
axum::http::header::CACHE_CONTROL,
|
||||
axum::http::HeaderValue::from_static("no-store"),
|
||||
);
|
||||
response
|
||||
}
|
||||
|
||||
pub fn build_router(state: AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/api/health", get(health_check))
|
||||
.route("/api/logs", get(plc_platform_core::handler::log::get_logs))
|
||||
.route("/api/logs/stream", get(plc_platform_core::handler::log::stream_logs))
|
||||
.route("/api/docs/api-md", get(crate::handler::doc::get_api_md))
|
||||
.route("/api/docs/readme-md", get(crate::handler::doc::get_readme_md))
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(
|
||||
ServeDir::new("web/ops")
|
||||
.append_index_html_on_directories(true)
|
||||
.fallback(ServeDir::new("web/core")),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
async fn health_check(State(state): State<AppState>) -> String {
|
||||
format!("{}:ok", state.app_name)
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Verify ops compiles and tests pass**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_operation_system
|
||||
cargo test -p app_operation_system
|
||||
```
|
||||
|
||||
Expected: both PASS
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/app_operation_system/src/router.rs
|
||||
git commit -m "feat(ops): register log and doc routes from shared core"
|
||||
```
|
||||
|
||||
## Task 7: Update Feeder Router API Doc Path Reference
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/app_feeder_distributor/src/router.rs` (no change if doc handler wrapper handles path)
|
||||
|
||||
- [ ] **Step 1: Verify feeder doc route still works with renamed file**
|
||||
|
||||
The feeder doc handler now reads `docs/api-feeder.md` instead of `API.md`. The route `/api/docs/api-md` stays the same — only the file path changed inside the handler.
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo test -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 2: Update README reference to API.md**
|
||||
|
||||
In `README.md`, replace the doc index entry:
|
||||
|
||||
Before:
|
||||
```markdown
|
||||
- API 接口说明: `API.md`
|
||||
```
|
||||
|
||||
After:
|
||||
```markdown
|
||||
- 投煤器布料机 API: `docs/api-feeder.md`
|
||||
- 运转系统 API: `docs/api-ops.md`
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add README.md
|
||||
git commit -m "docs: update API doc references for per-app split"
|
||||
```
|
||||
|
||||
## Task 8: Final Verification
|
||||
|
||||
- [ ] **Step 1: Run all workspace tests**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo test --workspace
|
||||
```
|
||||
|
||||
Expected: all PASS
|
||||
|
||||
- [ ] **Step 2: Run release builds**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo build -p app_feeder_distributor --release
|
||||
cargo build -p app_operation_system --release
|
||||
```
|
||||
|
||||
Expected: both produce binaries
|
||||
|
||||
- [ ] **Step 3: Verify web file layout**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
find web -type f | sort
|
||||
```
|
||||
|
||||
Expected: core pages have no unit references, feeder overrides contain unit references, both apps have their own topbar and index.
|
||||
|
||||
- [ ] **Step 4: Verify core HTML has no unit references**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
grep -ri "unit" web/core/ --include="*.html"
|
||||
```
|
||||
|
||||
Expected: no matches
|
||||
|
||||
## Self-Review
|
||||
|
||||
### Coverage
|
||||
|
||||
- Three-panel UI (ops / app-config / platform-config): Tasks 1–3
|
||||
- Core HTML cleaned of unit references: Task 1
|
||||
- Feeder overrides via fallback chain: Task 2
|
||||
- Log handler to core: Task 4
|
||||
- Doc handler to core + API.md split: Tasks 5, 7
|
||||
- Ops app gets log/doc routes: Task 6
|
||||
- Build verification: Task 8
|
||||
|
||||
### What this plan does NOT cover (deferred)
|
||||
|
||||
- **PlatformContext completion**: Filling in pool/connection_manager/event_manager/ws_manager in the core context struct. This is a prerequisite for moving the remaining handlers (source, point, equipment, tag, page) to core.
|
||||
- **Remaining handler migration**: source.rs (626 lines), point.rs (693 lines), equipment.rs (335 lines), tag.rs (126 lines), page.rs (169 lines) all depend on `AppState` and require PlatformContext to move to core.
|
||||
- **Unit-panel JS wiring**: The standalone unit-panel in app-config view needs JS to render unit list and handle CRUD. Currently the unit rendering logic in `units.js` targets `#unitList` (in source-panel). Wiring `#unitConfigList` to the same data is a follow-up JS task.
|
||||
|
||||
### Key design decisions
|
||||
|
||||
- **ServeDir fallback override pattern**: Feeder overrides core pages by placing same-named files in `web/feeder/html/`. The fallback chain tries app dir first. This means core files are the "clean" base, feeder adds business-specific UI on top.
|
||||
- **Separate unit-panel IDs**: Uses `refreshUnitBtn2`, `newUnitBtn2`, `unitConfigList` to avoid conflicts with the unit list embedded in the feeder source-panel override. Both are wired to the same `loadUnits()` / `openCreateUnitModal()` functions.
|
||||
- **Log handler is fully stateless**: Reads files from `./logs` directory. No AppState dependency. Trivially movable to core.
|
||||
- **Doc handler split**: Core provides `serve_markdown(path)` utility. Each app wraps it to point to its own API doc file.
|
||||
|
|
@ -0,0 +1,564 @@
|
|||
# Web Page Split And Root Source Cleanup
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Restructure the `web/` directory into `core/` + `feeder/` + `ops/` subdirectories, delete the obsolete root `src/` files, and update README to reflect the new workspace layout.
|
||||
|
||||
**Architecture:** Web pages split into a shared `web/core/` (platform HTML partials and CSS) and per-app directories (`web/feeder/`, `web/ops/`). Each app's Axum router uses `ServeDir` with fallback: try app-specific dir first, then core. This means no URL changes in HTML/JS — the fallback chain resolves transparently. The root `src/` contains stale copies of files already migrated to crates and must be removed.
|
||||
|
||||
**Tech Stack:** Rust (Axum, tower-http ServeDir), HTML/CSS/JS (vanilla, ES modules), Cargo workspace
|
||||
|
||||
---
|
||||
|
||||
## File Map
|
||||
|
||||
### Web core (shared platform pages)
|
||||
|
||||
- Move: `web/styles.css` → `web/core/styles.css`
|
||||
- Move: `web/html/source-panel.html` → `web/core/html/source-panel.html`
|
||||
- Move: `web/html/points-panel.html` → `web/core/html/points-panel.html`
|
||||
- Move: `web/html/equipment-panel.html` → `web/core/html/equipment-panel.html`
|
||||
- Move: `web/html/chart-panel.html` → `web/core/html/chart-panel.html`
|
||||
- Move: `web/html/log-stream-panel.html` → `web/core/html/log-stream-panel.html`
|
||||
- Move: `web/html/logs-panel.html` → `web/core/html/logs-panel.html`
|
||||
- Move: `web/html/api-doc-drawer.html` → `web/core/html/api-doc-drawer.html`
|
||||
- Create: `web/core/html/modals.html` (core modals only — equipment, source, point, binding; unit modal removed)
|
||||
|
||||
### Web feeder (feeder-specific pages + all JS)
|
||||
|
||||
- Move: `web/index.html` → `web/feeder/index.html` (add unit-modal partial reference)
|
||||
- Move: `web/html/topbar.html` → `web/feeder/html/topbar.html`
|
||||
- Move: `web/html/ops-panel.html` → `web/feeder/html/ops-panel.html`
|
||||
- Create: `web/feeder/html/unit-modal.html` (extracted from old modals.html)
|
||||
- Move: `web/js/*.js` → `web/feeder/js/*.js` (all 15 JS files stay together as interconnected module graph)
|
||||
|
||||
### Web ops (operation-system pages)
|
||||
|
||||
- Move: `crates/app_operation_system/web/index.html` → `web/ops/index.html` (updated content)
|
||||
- Create: `web/ops/html/topbar.html`
|
||||
- Create: `web/ops/js/index.js`
|
||||
- Create: `web/ops/js/app.js`
|
||||
|
||||
### Rust router changes
|
||||
|
||||
- Modify: `crates/app_feeder_distributor/src/router.rs` (update ServeDir to use fallback)
|
||||
- Modify: `crates/app_operation_system/src/router.rs` (update ServeDir to use fallback)
|
||||
|
||||
### Root src cleanup
|
||||
|
||||
- Delete: all 19 files under `src/` (stale duplicates of files in crates)
|
||||
|
||||
### Documentation
|
||||
|
||||
- Modify: `README.md`
|
||||
|
||||
---
|
||||
|
||||
## Task 1: Split modals.html And Create Unit Modal Partial
|
||||
|
||||
**Files:**
|
||||
- Create: `web/core/html/modals.html`
|
||||
- Create: `web/feeder/html/unit-modal.html`
|
||||
|
||||
- [ ] **Step 1: Create core modals (without unit modal)**
|
||||
|
||||
Extract everything except the unit modal div from `web/html/modals.html` into a new file:
|
||||
|
||||
`web/core/html/modals.html`:
|
||||
```html
|
||||
<div class="modal hidden" id="equipmentModal">
|
||||
<!-- keep entire equipment modal as-is from current modals.html lines 53-87 -->
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointModal">
|
||||
<!-- keep entire point modal as-is from current modals.html lines 89-107 -->
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="sourceModal">
|
||||
<!-- keep entire source modal as-is from current modals.html lines 109-135 -->
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointBindingModal">
|
||||
<!-- keep entire binding modal as-is from current modals.html lines 137-163 -->
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="batchBindingModal">
|
||||
<!-- keep entire batch binding modal as-is from current modals.html lines 165-187 -->
|
||||
</div>
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Create feeder unit modal partial**
|
||||
|
||||
Extract the unit modal into its own file:
|
||||
|
||||
`web/feeder/html/unit-modal.html`:
|
||||
```html
|
||||
<div class="modal hidden" id="unitModal">
|
||||
<!-- keep entire unit modal as-is from current modals.html lines 1-51 -->
|
||||
</div>
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Verify both files contain all original modal content**
|
||||
|
||||
Check that the combined line count of the two new files matches the original `web/html/modals.html` (188 lines total, minus blank lines between sections).
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add web/core/html/modals.html web/feeder/html/unit-modal.html
|
||||
git commit -m "refactor(web): split modals into core and feeder unit-modal"
|
||||
```
|
||||
|
||||
## Task 2: Move Core HTML And CSS Into web/core
|
||||
|
||||
**Files:**
|
||||
- Move: `web/styles.css` → `web/core/styles.css`
|
||||
- Move: `web/html/source-panel.html` → `web/core/html/source-panel.html`
|
||||
- Move: `web/html/points-panel.html` → `web/core/html/points-panel.html`
|
||||
- Move: `web/html/equipment-panel.html` → `web/core/html/equipment-panel.html`
|
||||
- Move: `web/html/chart-panel.html` → `web/core/html/chart-panel.html`
|
||||
- Move: `web/html/log-stream-panel.html` → `web/core/html/log-stream-panel.html`
|
||||
- Move: `web/html/logs-panel.html` → `web/core/html/logs-panel.html`
|
||||
- Move: `web/html/api-doc-drawer.html` → `web/core/html/api-doc-drawer.html`
|
||||
|
||||
- [ ] **Step 1: Create core directories and move files**
|
||||
|
||||
```bash
|
||||
mkdir -p web/core/html
|
||||
git mv web/styles.css web/core/styles.css
|
||||
git mv web/html/source-panel.html web/core/html/source-panel.html
|
||||
git mv web/html/points-panel.html web/core/html/points-panel.html
|
||||
git mv web/html/equipment-panel.html web/core/html/equipment-panel.html
|
||||
git mv web/html/chart-panel.html web/core/html/chart-panel.html
|
||||
git mv web/html/log-stream-panel.html web/core/html/log-stream-panel.html
|
||||
git mv web/html/logs-panel.html web/core/html/logs-panel.html
|
||||
git mv web/html/api-doc-drawer.html web/core/html/api-doc-drawer.html
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add web/core
|
||||
git commit -m "refactor(web): move shared HTML partials and CSS into web/core"
|
||||
```
|
||||
|
||||
## Task 3: Move Feeder-Specific HTML And All JS Into web/feeder
|
||||
|
||||
**Files:**
|
||||
- Move: `web/index.html` → `web/feeder/index.html`
|
||||
- Move: `web/html/topbar.html` → `web/feeder/html/topbar.html`
|
||||
- Move: `web/html/ops-panel.html` → `web/feeder/html/ops-panel.html`
|
||||
- Move: `web/js/*.js` → `web/feeder/js/*.js`
|
||||
- Delete: `web/html/modals.html` (replaced by split files in Task 1)
|
||||
|
||||
- [ ] **Step 1: Create feeder directories and move files**
|
||||
|
||||
```bash
|
||||
mkdir -p web/feeder/html web/feeder/js
|
||||
git mv web/html/topbar.html web/feeder/html/topbar.html
|
||||
git mv web/html/ops-panel.html web/feeder/html/ops-panel.html
|
||||
git mv web/js/api.js web/feeder/js/api.js
|
||||
git mv web/js/app.js web/feeder/js/app.js
|
||||
git mv web/js/chart.js web/feeder/js/chart.js
|
||||
git mv web/js/docs.js web/feeder/js/docs.js
|
||||
git mv web/js/dom.js web/feeder/js/dom.js
|
||||
git mv web/js/equipment.js web/feeder/js/equipment.js
|
||||
git mv web/js/events.js web/feeder/js/events.js
|
||||
git mv web/js/index.js web/feeder/js/index.js
|
||||
git mv web/js/logs.js web/feeder/js/logs.js
|
||||
git mv web/js/ops.js web/feeder/js/ops.js
|
||||
git mv web/js/points.js web/feeder/js/points.js
|
||||
git mv web/js/roles.js web/feeder/js/roles.js
|
||||
git mv web/js/sources.js web/feeder/js/sources.js
|
||||
git mv web/js/state.js web/feeder/js/state.js
|
||||
git mv web/js/units.js web/feeder/js/units.js
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Move index.html and delete old modals**
|
||||
|
||||
```bash
|
||||
git mv web/index.html web/feeder/index.html
|
||||
git rm web/html/modals.html
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update feeder index.html to add unit-modal partial**
|
||||
|
||||
In `web/feeder/index.html`, change the modals partial line and add a unit-modal partial:
|
||||
|
||||
Before:
|
||||
```html
|
||||
<div data-partial="/ui/html/modals.html"></div>
|
||||
```
|
||||
|
||||
After:
|
||||
```html
|
||||
<div data-partial="/ui/html/modals.html"></div>
|
||||
<div data-partial="/ui/html/unit-modal.html"></div>
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Verify no files remain in old web/html and web/js directories**
|
||||
|
||||
```bash
|
||||
[ -n "$(ls -A web/html 2>/dev/null)" ] && echo "ERROR: web/html still has files" || echo "OK: web/html is clean"
|
||||
[ -n "$(ls -A web/js 2>/dev/null)" ] && echo "ERROR: web/js still has files" || echo "OK: web/js is clean"
|
||||
```
|
||||
|
||||
Expected: both directories are empty or deleted.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add web/feeder
|
||||
git commit -m "refactor(web): move feeder HTML, JS, and index into web/feeder"
|
||||
```
|
||||
|
||||
## Task 4: Update Feeder Router To Use Fallback ServeDir
|
||||
|
||||
**Files:**
|
||||
- Modify: `crates/app_feeder_distributor/src/router.rs`
|
||||
|
||||
- [ ] **Step 1: Update the static file serving to use fallback chain**
|
||||
|
||||
In `crates/app_feeder_distributor/src/router.rs`, replace the current `/ui` nest:
|
||||
|
||||
Before:
|
||||
```rust
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(ServeDir::new("web").append_index_html_on_directories(true))
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
```
|
||||
|
||||
After:
|
||||
```rust
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(
|
||||
ServeDir::new("web/feeder")
|
||||
.append_index_html_on_directories(true)
|
||||
.fallback(ServeDir::new("web/core")),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Verify feeder crate compiles**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_feeder_distributor
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add crates/app_feeder_distributor/src/router.rs
|
||||
git commit -m "refactor(feeder): update static file serving for split web dirs"
|
||||
```
|
||||
|
||||
## Task 5: Create Operation-System Web Pages And Update Router
|
||||
|
||||
**Files:**
|
||||
- Create: `web/ops/index.html`
|
||||
- Create: `web/ops/html/topbar.html`
|
||||
- Create: `web/ops/js/index.js`
|
||||
- Create: `web/ops/js/app.js`
|
||||
- Modify: `crates/app_operation_system/src/router.rs`
|
||||
- Delete: `crates/app_operation_system/web/index.html`
|
||||
|
||||
- [ ] **Step 1: Create ops web scaffold**
|
||||
|
||||
`web/ops/index.html`:
|
||||
```html
|
||||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>运转系统</title>
|
||||
<link rel="stylesheet" href="/ui/styles.css" />
|
||||
</head>
|
||||
<body>
|
||||
<div data-partial="/ui/html/topbar.html"></div>
|
||||
|
||||
<main>
|
||||
<div class="muted" style="padding:2rem;text-align:center">运转系统页面开发中</div>
|
||||
</main>
|
||||
|
||||
<script type="module" src="/ui/js/index.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
`web/ops/html/topbar.html`:
|
||||
```html
|
||||
<header class="topbar">
|
||||
<div class="title">运转系统</div>
|
||||
<div class="topbar-actions">
|
||||
<div class="status" id="statusText">
|
||||
<span class="ws-dot" id="wsDot"></span>
|
||||
<span id="wsLabel">连接中…</span>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
```
|
||||
|
||||
`web/ops/js/index.js`:
|
||||
```javascript
|
||||
async function loadPartial(slot) {
|
||||
const response = await fetch(slot.dataset.partial);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to load partial: ${slot.dataset.partial}`);
|
||||
}
|
||||
|
||||
const html = await response.text();
|
||||
slot.insertAdjacentHTML("beforebegin", html);
|
||||
slot.remove();
|
||||
}
|
||||
|
||||
async function bootstrapPage() {
|
||||
const slots = Array.from(document.querySelectorAll("[data-partial]"));
|
||||
await Promise.all(slots.map((slot) => loadPartial(slot)));
|
||||
await import("./app.js");
|
||||
}
|
||||
|
||||
bootstrapPage().catch((error) => {
|
||||
document.body.innerHTML = `<pre>${error.message || String(error)}</pre>`;
|
||||
});
|
||||
```
|
||||
|
||||
`web/ops/js/app.js`:
|
||||
```javascript
|
||||
function bootstrap() {
|
||||
console.log("Operation system app initialized");
|
||||
}
|
||||
|
||||
bootstrap();
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update ops router to use split web dirs**
|
||||
|
||||
Replace `crates/app_operation_system/src/router.rs`:
|
||||
|
||||
```rust
|
||||
use axum::{extract::State, routing::get, Router};
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
use crate::app::AppState;
|
||||
|
||||
async fn no_cache(req: axum::extract::Request, next: axum::middleware::Next) -> axum::response::Response {
|
||||
let mut response = next.run(req).await;
|
||||
response.headers_mut().insert(
|
||||
axum::http::header::CACHE_CONTROL,
|
||||
axum::http::HeaderValue::from_static("no-store"),
|
||||
);
|
||||
response
|
||||
}
|
||||
|
||||
pub fn build_router(state: AppState) -> Router {
|
||||
Router::new()
|
||||
.route("/api/health", get(health_check))
|
||||
.nest(
|
||||
"/ui",
|
||||
Router::new()
|
||||
.fallback_service(
|
||||
ServeDir::new("web/ops")
|
||||
.append_index_html_on_directories(true)
|
||||
.fallback(ServeDir::new("web/core")),
|
||||
)
|
||||
.layer(axum::middleware::from_fn(no_cache)),
|
||||
)
|
||||
.with_state(state)
|
||||
}
|
||||
|
||||
async fn health_check(State(state): State<AppState>) -> String {
|
||||
format!("{}:ok", state.app_name)
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Delete old ops web placeholder**
|
||||
|
||||
```bash
|
||||
git rm crates/app_operation_system/web/index.html
|
||||
rmdir crates/app_operation_system/web 2>/dev/null || true
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Verify ops crate compiles**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check -p app_operation_system
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 5: Update ops router smoke test if needed**
|
||||
|
||||
Check `crates/app_operation_system/tests/router_smoke.rs` — if it references the old `WEB_ROOT` constant, update accordingly.
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add web/ops crates/app_operation_system
|
||||
git commit -m "refactor(ops): add ops web scaffold and update router for split dirs"
|
||||
```
|
||||
|
||||
## Task 6: Delete Obsolete Root src/ Files
|
||||
|
||||
**Files:**
|
||||
- Delete: all 19 files under `src/`
|
||||
|
||||
- [ ] **Step 1: Verify all root src files are duplicates of crate files**
|
||||
|
||||
Run quick checks:
|
||||
|
||||
```bash
|
||||
diff src/config.rs crates/app_feeder_distributor/src/config.rs
|
||||
diff src/handler.rs crates/app_feeder_distributor/src/handler.rs
|
||||
diff src/middleware.rs crates/app_feeder_distributor/src/middleware.rs
|
||||
```
|
||||
|
||||
All should show no functional differences (only BOM or whitespace).
|
||||
|
||||
- [ ] **Step 2: Remove all root src files from git**
|
||||
|
||||
```bash
|
||||
git rm -r src/
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Verify workspace still builds**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
cargo check --workspace
|
||||
```
|
||||
|
||||
Expected: PASS (root src/ is not a workspace member, removing it changes nothing for the build)
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "chore: remove obsolete root src/ (migrated to crates)"
|
||||
```
|
||||
|
||||
## Task 7: Update README
|
||||
|
||||
**Files:**
|
||||
- Modify: `README.md`
|
||||
|
||||
- [ ] **Step 1: Update the README to reflect the workspace structure**
|
||||
|
||||
Replace the outdated "后端结构" and add build instructions. Key sections to update:
|
||||
|
||||
- Remove references to `src/main.rs`, `src/handler`, `src/service`
|
||||
- Add workspace structure overview:
|
||||
|
||||
```markdown
|
||||
## 项目结构
|
||||
|
||||
```text
|
||||
plc_control/
|
||||
Cargo.toml # Workspace root
|
||||
crates/
|
||||
plc_platform_core/ # 共享平台核心库
|
||||
app_feeder_distributor/ # 投煤器布料机专用版
|
||||
app_operation_system/ # 运转系统专用版
|
||||
web/
|
||||
core/ # 共享 HTML/CSS(点位、设备、数据源等)
|
||||
feeder/ # 投煤器布料机页面 + JS
|
||||
ops/ # 运转系统页面 + JS
|
||||
```
|
||||
|
||||
## 构建
|
||||
|
||||
```powershell
|
||||
# 投煤器布料机
|
||||
cargo build -p app_feeder_distributor --release
|
||||
|
||||
# 运转系统
|
||||
cargo build -p app_operation_system --release
|
||||
```
|
||||
|
||||
## 部署
|
||||
|
||||
将编译产物和 `web/` 目录放在同一级目录下:
|
||||
|
||||
```text
|
||||
deploy/
|
||||
app_feeder_distributor.exe
|
||||
web/
|
||||
core/
|
||||
feeder/
|
||||
```
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git add README.md
|
||||
git commit -m "docs: update README for workspace and web split layout"
|
||||
```
|
||||
|
||||
## Task 8: Final Verification
|
||||
|
||||
- [ ] **Step 1: Run all tests**
|
||||
|
||||
```bash
|
||||
cargo test --workspace
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 2: Run release builds**
|
||||
|
||||
```bash
|
||||
cargo build -p app_feeder_distributor --release
|
||||
cargo build -p app_operation_system --release
|
||||
```
|
||||
|
||||
Expected: both produce binaries successfully.
|
||||
|
||||
- [ ] **Step 3: Verify web file layout**
|
||||
|
||||
```bash
|
||||
find web -type f | sort
|
||||
```
|
||||
|
||||
Expected: files organized under `web/core/`, `web/feeder/`, `web/ops/` only. No files remaining directly under `web/html/` or `web/js/`.
|
||||
|
||||
## Self-Review
|
||||
|
||||
### Spec coverage
|
||||
|
||||
- Web split into core + per-app directories: Tasks 1–5
|
||||
- Fallback ServeDir for transparent URL resolution: Tasks 4–5
|
||||
- Root src cleanup: Task 6
|
||||
- README update: Task 7
|
||||
- Build verification: Task 8
|
||||
|
||||
### Key design decision: ServeDir fallback
|
||||
|
||||
Using `ServeDir::new("web/feeder").fallback(ServeDir::new("web/core"))` means:
|
||||
- No URL changes needed in any HTML partial references or JS imports
|
||||
- App-specific files override core files of the same name (app takes priority)
|
||||
- Browser requests are resolved transparently through the chain
|
||||
|
||||
### Spec deviation: web directory location
|
||||
|
||||
The original design spec §8.4 suggested per-app web directories inside each crate (`app_feeder_distributor/web`, `app_operation_system/web`). This plan deliberately places web files at the workspace root (`web/core/`, `web/feeder/`, `web/ops/`) instead. Reason: enables the ServeDir fallback chain to share core assets without duplication, and avoids coupling web resources to Rust crate build paths. This is a justified departure from the spec.
|
||||
|
||||
### What this plan does NOT cover (deferred)
|
||||
|
||||
- `PlatformContext` completion (filling in pool/connection_manager/event_manager/ws_manager)
|
||||
- `config.rs` migration into shared core
|
||||
- `control/validator.rs` splitting
|
||||
- Event namespace migration at call sites
|
||||
- These are larger refactors that should be planned separately
|
||||
|
|
@ -0,0 +1,507 @@
|
|||
# 双专用版与共享核心库架构设计
|
||||
|
||||
日期:2026-04-14
|
||||
|
||||
## 1. 目标
|
||||
|
||||
在当前 `plc_control` 仓库基础上,重构为“共享平台核心库 + 两个专用上层应用”的结构,满足以下目标:
|
||||
|
||||
- 产出两个独立可执行程序:
|
||||
- 投煤器布料机专用版
|
||||
- 运转系统专用版
|
||||
- 两个程序各自带 Web 界面、独立路由、独立业务逻辑
|
||||
- 两个程序复用同一套 Rust 平台能力,避免重复维护 OPC UA、数据库、事件、设备点位等基础能力
|
||||
- 后续保留扩展空间,允许在同一套代码上再组合出“统一版单程序”产物
|
||||
|
||||
本次设计优先保证边界清晰、后续可维护,不追求第一阶段的抽象最少代码量。
|
||||
|
||||
## 2. 设计结论
|
||||
|
||||
本项目采用以下总体结构:
|
||||
|
||||
- `plc_platform_core`:共享平台核心库
|
||||
- `app_feeder_distributor`:投煤器布料机专用版
|
||||
- `app_operation_system`:运转系统专用版
|
||||
|
||||
这是推荐的第一阶段目标结构:
|
||||
|
||||
```text
|
||||
plc_control/
|
||||
Cargo.toml
|
||||
Cargo.lock
|
||||
crates/
|
||||
plc_platform_core/
|
||||
app_feeder_distributor/
|
||||
app_operation_system/
|
||||
```
|
||||
|
||||
未来如需支持“统一单程序版本”,再新增:
|
||||
|
||||
```text
|
||||
crates/
|
||||
app_unified/
|
||||
```
|
||||
|
||||
当前不引入“配置切换两套业务模式”的做法,也不把两套业务状态机强行抽成一套通用业务引擎。
|
||||
|
||||
## 3. 方案选择与取舍
|
||||
|
||||
本次在三个方案中选择方案 B。
|
||||
|
||||
### 方案 A:单仓库单 crate,多 bin
|
||||
|
||||
做法:
|
||||
- 保留当前工程主结构
|
||||
- 使用 `src/bin/*.rs` 增加多个入口
|
||||
|
||||
优点:
|
||||
- 改动最小
|
||||
- 最快得到两个可执行程序
|
||||
|
||||
缺点:
|
||||
- 共享边界不清晰
|
||||
- `main.rs` 装配逻辑会持续膨胀
|
||||
- 两套业务会继续在同一套模块里相互污染
|
||||
|
||||
### 方案 B:workspace + 共享核心库 + 两个业务应用
|
||||
|
||||
做法:
|
||||
- 根目录改为 Cargo workspace
|
||||
- 下沉平台能力到共享库
|
||||
- 两套业务各自保留自己的入口、路由、页面和控制逻辑
|
||||
|
||||
优点:
|
||||
- 平台层和业务层职责清晰
|
||||
- 两套专用系统可以独立发版
|
||||
- 后续新增第三套业务时仍可沿用同一模式
|
||||
|
||||
缺点:
|
||||
- 第一次重构成本高于方案 A
|
||||
|
||||
### 方案 C:workspace + 平台库 + 业务插件化
|
||||
|
||||
做法:
|
||||
- 在共享库上继续抽象统一业务接口或配置驱动模式
|
||||
|
||||
优点:
|
||||
- 理论抽象度最高
|
||||
|
||||
缺点:
|
||||
- 当前阶段过早抽象
|
||||
- 容易把两套差异明显的业务规则硬揉在一起
|
||||
|
||||
### 选择结论
|
||||
|
||||
采用方案 B。
|
||||
|
||||
原因:
|
||||
- 当前代码已经具备平台层与业务层雏形
|
||||
- 投煤器布料机与运转系统预计将长期演化为两套不同的业务规则与页面表达
|
||||
- 平台能力稳定、业务能力高变化,适合分层
|
||||
|
||||
## 4. 编译产物与发布模型
|
||||
|
||||
本设计明确支持以下发布模式:
|
||||
|
||||
### 4.1 两个独立应用
|
||||
|
||||
发布两个独立 exe:
|
||||
|
||||
- `app_feeder_distributor.exe`
|
||||
- `app_operation_system.exe`
|
||||
|
||||
适用场景:
|
||||
- 两套系统部署隔离
|
||||
- 发版节奏不同
|
||||
- 功能和界面差异持续扩大
|
||||
|
||||
### 4.2 单个统一应用
|
||||
|
||||
在不改变共享库设计的前提下,未来可新增 `app_unified`,将两套业务模块装配到同一个程序中。
|
||||
|
||||
适用场景:
|
||||
- 现场更希望只部署一个软件
|
||||
- 需要统一登录、统一菜单、统一入口
|
||||
|
||||
结论:
|
||||
- 当前阶段以“双独立应用”为主目标
|
||||
- 架构上保留“可组合出单一应用”的扩展空间
|
||||
|
||||
## 5. 模块边界
|
||||
|
||||
### 5.1 共享平台核心库边界
|
||||
|
||||
`plc_platform_core` 负责平台层能力,不直接绑定投煤器布料机或运转系统的业务语义。
|
||||
|
||||
建议纳入共享库的模块:
|
||||
|
||||
- `config`
|
||||
- `db`
|
||||
- `model`
|
||||
- `connection`
|
||||
- `telemetry`
|
||||
- `event` 的平台公共能力
|
||||
- `websocket`
|
||||
- `service`
|
||||
- `util`
|
||||
- `control/command`
|
||||
- `control/runtime`
|
||||
- `control/validator` 中的通用校验能力
|
||||
|
||||
共享库主要职责:
|
||||
|
||||
- 配置加载
|
||||
- 数据库初始化与访问
|
||||
- OPC UA 接入与重连
|
||||
- 点位读写与遥测订阅
|
||||
- 通用事件发布、持久化、广播
|
||||
- WebSocket 基础设施
|
||||
- 通用运行时状态存储
|
||||
- 通用控制命令封装
|
||||
|
||||
### 5.2 业务应用边界
|
||||
|
||||
两个 app crate 各自承载自己的业务能力。
|
||||
|
||||
`app_feeder_distributor` 负责:
|
||||
|
||||
- 投煤器/布料机控制状态机
|
||||
- 投煤器/布料机业务接口
|
||||
- 业务页面与交互
|
||||
- 业务事件定义
|
||||
- 业务运行时状态
|
||||
|
||||
`app_operation_system` 负责:
|
||||
|
||||
- 运转系统控制逻辑
|
||||
- 运转系统业务接口
|
||||
- 运转系统业务页面
|
||||
- 业务事件定义
|
||||
- 业务运行时状态
|
||||
|
||||
### 5.3 不建议先共用的模块
|
||||
|
||||
以下模块当前阶段不建议直接抽为统一业务层:
|
||||
|
||||
- `control/engine.rs`
|
||||
- `handler/control.rs`
|
||||
- 业务 Web 页面目录
|
||||
|
||||
原因:
|
||||
- 这些位置最容易掺杂业务状态机、联锁规则和页面表达逻辑
|
||||
- 强行共用会导致大量条件分支
|
||||
|
||||
## 6. 事件体系设计
|
||||
|
||||
### 6.1 总体原则
|
||||
|
||||
采用“代码层分层,存储层统一”的方案:
|
||||
|
||||
- 共享库定义平台级事件
|
||||
- 各业务应用定义自己的业务事件
|
||||
- 所有事件最终落同一张 `event` 表
|
||||
- `event_type` 使用命名空间字符串
|
||||
|
||||
### 6.2 平台事件
|
||||
|
||||
平台事件放在 `plc_platform_core::event`,只承载两个系统都需要的公共语义,例如:
|
||||
|
||||
- 数据源连接/断开
|
||||
- 通用控制命令已发送
|
||||
- 通用设备基础信息变更
|
||||
- 通用订阅或连接异常
|
||||
|
||||
### 6.3 业务事件
|
||||
|
||||
业务事件分别定义在各自 app 内。
|
||||
|
||||
投煤器布料机专用版示例:
|
||||
|
||||
- `feeder.auto_control_started`
|
||||
- `feeder.auto_control_stopped`
|
||||
- `feeder.fault_locked`
|
||||
- `feeder.fault_acked`
|
||||
- `feeder.comm_locked`
|
||||
- `feeder.unit_state_changed`
|
||||
- `feeder.rem_local`
|
||||
- `feeder.rem_recovered`
|
||||
|
||||
运转系统专用版示例:
|
||||
|
||||
- `ops.unit_started`
|
||||
- `ops.unit_stopped`
|
||||
- `ops.phase_changed`
|
||||
- `ops.interlock_triggered`
|
||||
- `ops.interlock_released`
|
||||
|
||||
### 6.4 存储设计
|
||||
|
||||
事件统一使用现有 `event` 表存储。
|
||||
|
||||
要求:
|
||||
|
||||
- `event_type` 必须使用命名空间,例如 `platform.*`、`feeder.*`、`ops.*`
|
||||
- `payload` 继续存结构化 JSON
|
||||
- 事件查询接口支持按命名空间或事件类型前缀过滤
|
||||
|
||||
本阶段不拆分为多张事件表。
|
||||
|
||||
原因:
|
||||
- 复用现有表结构和查询能力
|
||||
- 查询统一时间线更方便
|
||||
- 降低改造成本
|
||||
|
||||
## 7. 应用上下文设计
|
||||
|
||||
### 7.1 共享平台上下文
|
||||
|
||||
共享库定义平台上下文,承载所有公共资源:
|
||||
|
||||
```rust
|
||||
pub struct PlatformContext {
|
||||
pub config: AppConfig,
|
||||
pub pool: sqlx::PgPool,
|
||||
pub connection_manager: Arc<ConnectionManager>,
|
||||
pub event_manager: Arc<EventManager>,
|
||||
pub ws_manager: Arc<WebSocketManager>,
|
||||
}
|
||||
```
|
||||
|
||||
职责:
|
||||
|
||||
- 提供公共资源句柄
|
||||
- 提供平台初始化入口
|
||||
- 支撑通用 service、事件、WebSocket 能力
|
||||
|
||||
### 7.2 业务应用状态
|
||||
|
||||
每个 app 在平台上下文之上增加自己的运行态:
|
||||
|
||||
```rust
|
||||
pub struct FeederAppState {
|
||||
pub platform: Arc<PlatformContext>,
|
||||
pub runtime: Arc<FeederRuntimeStore>,
|
||||
}
|
||||
|
||||
pub struct OperationAppState {
|
||||
pub platform: Arc<PlatformContext>,
|
||||
pub runtime: Arc<OperationRuntimeStore>,
|
||||
}
|
||||
```
|
||||
|
||||
设计原则:
|
||||
|
||||
- 平台上下文不持有业务特有运行态
|
||||
- 业务运行态不回流污染共享库
|
||||
- 每个业务程序保持自己的状态边界
|
||||
|
||||
## 8. 路由与前端拆分
|
||||
|
||||
### 8.1 平台通用 API
|
||||
|
||||
以下接口倾向保留为平台公共能力:
|
||||
|
||||
- `source`
|
||||
- `point`
|
||||
- `equipment`
|
||||
- `tag`
|
||||
- `page`
|
||||
- `logs`
|
||||
- 文档接口
|
||||
- 通用事件查询接口
|
||||
|
||||
这些能力更偏基础平台,不依赖单一业务系统。
|
||||
|
||||
### 8.2 业务专用 API
|
||||
|
||||
投煤器布料机专用版保留自己的业务接口,例如:
|
||||
|
||||
- 自动控制启停
|
||||
- 故障确认
|
||||
- 运行时状态查询
|
||||
- 业务详情页接口
|
||||
|
||||
运转系统专用版保留自己的业务接口,例如:
|
||||
|
||||
- 运转单元控制
|
||||
- 联锁状态
|
||||
- 运行阶段与流程查询
|
||||
|
||||
### 8.3 统一版扩展策略
|
||||
|
||||
如果未来新增 `app_unified`,建议在统一程序内按前缀挂业务路由,例如:
|
||||
|
||||
- `/api/feeder/...`
|
||||
- `/api/ops/...`
|
||||
|
||||
### 8.4 Web 资源拆分
|
||||
|
||||
不继续维持一个共享 `web/` 目录承载两套业务页面。
|
||||
|
||||
建议:
|
||||
|
||||
- `app_feeder_distributor/web`
|
||||
- `app_operation_system/web`
|
||||
|
||||
未来若有统一版,再单独提供:
|
||||
|
||||
- `app_unified/web`
|
||||
|
||||
理由:
|
||||
- 页面层是业务表达层,最容易分叉
|
||||
- 强行共用会导致样式、导航、面板和脚本互相污染
|
||||
|
||||
## 9. 现有文件迁移建议
|
||||
|
||||
### 9.1 优先迁入共享库
|
||||
|
||||
建议优先迁入 `plc_platform_core` 的现有文件:
|
||||
|
||||
- `src/config.rs`
|
||||
- `src/db.rs`
|
||||
- `src/model.rs`
|
||||
- `src/connection.rs`
|
||||
- `src/telemetry.rs`
|
||||
- `src/event.rs` 中的平台公共部分
|
||||
- `src/websocket.rs`
|
||||
- `src/service.rs`
|
||||
- `src/service/`
|
||||
- `src/util.rs`
|
||||
- `src/util/`
|
||||
- `src/control/command.rs`
|
||||
- `src/control/runtime.rs`
|
||||
|
||||
### 9.2 迁入共享库前需拆分
|
||||
|
||||
- `src/control/validator.rs`
|
||||
- 保留通用前置校验到共享库
|
||||
- 业务特殊校验留在各业务应用层
|
||||
|
||||
- `src/handler/`
|
||||
- 平台通用 handler 可保留为共享路由组件
|
||||
- 业务控制 handler 留在各自 app
|
||||
|
||||
### 9.3 保留在业务应用
|
||||
|
||||
优先保留在业务应用层的模块:
|
||||
|
||||
- `src/main.rs`
|
||||
- `src/control/engine.rs`
|
||||
- `src/control/simulate.rs`
|
||||
- `src/handler/control.rs`
|
||||
- 业务页面资源 `web/`
|
||||
|
||||
## 10. 迁移顺序
|
||||
|
||||
### 阶段 1:整理当前单体边界
|
||||
|
||||
目标:
|
||||
- 功能不变
|
||||
- 将启动逻辑、平台资源、业务运行态边界梳理清楚
|
||||
|
||||
工作:
|
||||
- 拆解 `main.rs` 启动装配逻辑
|
||||
- 明确平台资源与业务资源边界
|
||||
- 收口明显业务相关的控制模块
|
||||
|
||||
### 阶段 2:改造成 workspace
|
||||
|
||||
目标:
|
||||
- 仓库切换为 Cargo workspace
|
||||
- 当前程序仍能编译运行
|
||||
|
||||
工作:
|
||||
- 根目录改为 `[workspace]`
|
||||
- 新建 `plc_platform_core`
|
||||
- 新建第一个 app crate
|
||||
|
||||
### 阶段 3:抽共享平台库
|
||||
|
||||
目标:
|
||||
- 将平台能力逐步迁入共享库
|
||||
|
||||
建议迁移顺序:
|
||||
|
||||
1. `config`
|
||||
2. `db`
|
||||
3. `model`
|
||||
4. `util`
|
||||
5. `service`
|
||||
6. `telemetry`
|
||||
7. `connection`
|
||||
8. `websocket`
|
||||
9. `event`
|
||||
10. `control/runtime`
|
||||
11. `control/command`
|
||||
|
||||
每迁一批都要求:
|
||||
- 当前 app 能编译
|
||||
- 行为不回退
|
||||
- 引用关系清晰
|
||||
|
||||
### 阶段 4:固化第一套专用版
|
||||
|
||||
目标:
|
||||
- 将当前仓库能力收敛为第一套专用应用
|
||||
|
||||
根据当前代码现状,优先将当前系统固化为:
|
||||
- `app_feeder_distributor`
|
||||
|
||||
原因:
|
||||
- 当前控制引擎与文档明显更贴近投煤器/布料机控制业务
|
||||
|
||||
### 阶段 5:新增运转系统专用版
|
||||
|
||||
目标:
|
||||
- 基于共享库新增第二个业务应用
|
||||
|
||||
工作:
|
||||
- 新建 `app_operation_system`
|
||||
- 接入运转系统自己的控制逻辑、事件、页面和接口
|
||||
|
||||
### 阶段 6:按需增加统一版
|
||||
|
||||
目标:
|
||||
- 如有需要,再增加 `app_unified`
|
||||
|
||||
当前阶段不作为必须交付项。
|
||||
|
||||
## 11. 验收标准
|
||||
|
||||
完成本设计后的实施阶段,应至少达到以下结果:
|
||||
|
||||
- 仓库成为 workspace 结构
|
||||
- 共享库可独立被两个 app 依赖
|
||||
- `app_feeder_distributor` 可独立编译为 exe
|
||||
- `app_operation_system` 可独立编译为 exe
|
||||
- 两个 app 各自拥有独立 Web 页面
|
||||
- 事件体系支持平台事件与业务事件分层
|
||||
- 事件统一落到同一张 `event` 表并使用命名空间
|
||||
- 共享库不承载具体业务状态机语义
|
||||
|
||||
## 12. 风险与约束
|
||||
|
||||
### 12.1 当前主要风险
|
||||
|
||||
- 当前 `main.rs` 装配逻辑集中,拆分时容易引入依赖循环
|
||||
- 当前 `handler` 与 `control` 的边界仍有部分耦合
|
||||
- Web 前端目前是单目录,拆分业务页面时需要重新梳理资源结构
|
||||
|
||||
### 12.2 约束
|
||||
|
||||
- 第一阶段不重写现有全部业务逻辑
|
||||
- 第一阶段不做业务插件化抽象
|
||||
- 第一阶段不把两套业务强行配置化为同一状态机
|
||||
|
||||
## 13. 最终结论
|
||||
|
||||
本项目适合演进为:
|
||||
|
||||
- 一个共享平台核心库
|
||||
- 两个独立业务专用应用
|
||||
|
||||
共享层只承载通用平台能力,业务层分别承载投煤器布料机和运转系统的控制逻辑、事件和页面。
|
||||
|
||||
发布上以“双独立 exe”为第一目标,同时保留未来组合出“统一单程序版”的能力。
|
||||
|
||||
这是当前仓库在可维护性、扩展性和实施风险之间最平衡的方案。
|
||||
|
|
@ -0,0 +1,887 @@
|
|||
# 运转系统独立软件实现方案
|
||||
|
||||
> 文档日期:2026-04-14
|
||||
> 适用对象:运转系统独立软件立项、总体设计评审、实施拆解
|
||||
> 参考来源:`运转系统逻辑说明.doc`、当前 `plc_control` 仓库实现
|
||||
|
||||
## 1. 背景与目标
|
||||
|
||||
根据《运转系统控制逻辑说明书》,目标系统需要覆盖整条运转链路的自动控制与联锁保护,包括:
|
||||
|
||||
- 回转线空车回送系统
|
||||
- 前端码车道输送系统
|
||||
- 码车机械臂配合系统
|
||||
- 摆渡车转运系统
|
||||
- 1 号干燥窑运转系统
|
||||
- 1 号焙烧窑运转系统
|
||||
- 2 号干燥窑运转系统
|
||||
- 2 号焙烧窑运转系统
|
||||
- 窑尾下摆渡车及卸砖线系统
|
||||
- 卸砖机位及卸砖机配合系统
|
||||
|
||||
该说明书的核心控制原则不是“简单定时启停”,而是:
|
||||
|
||||
- 顺序控制
|
||||
- 联锁保护
|
||||
- 检测信号闭环确认
|
||||
- 异常停机与人工恢复
|
||||
- 双窑线并行与公共段协同
|
||||
|
||||
因此,本次实现不建议继续把需求压缩进当前仓库已有的投煤器/布料机控制模型,而应按一个独立软件来设计和交付。同时,当前仓库中已经成熟的一部分通用能力可以直接复用,避免重复建设。
|
||||
|
||||
本方案的目标是:
|
||||
|
||||
- 明确新软件的系统边界
|
||||
- 盘点当前仓库中可复用的通用能力
|
||||
- 设计适合运转系统的领域模型、控制引擎和前后端架构
|
||||
- 给出实施路径,兼顾首版交付和后续公共能力沉淀
|
||||
|
||||
---
|
||||
|
||||
## 2. 结论先行
|
||||
|
||||
推荐方案是:
|
||||
|
||||
- 将运转系统作为独立软件建设
|
||||
- 保留自己的数据库模型、控制引擎、前端页面和业务配置
|
||||
- 复用当前 `plc_control` 仓库中已经验证过的通用基础能力
|
||||
- 不直接沿用当前仓库以 `unit + run_time_sec + stop_time_sec + acc_time_sec + bl_time_sec` 为核心的控制业务模型
|
||||
|
||||
换句话说,处理原则不是“在当前项目里继续叠加一个特殊模块”,而是:
|
||||
|
||||
- 当前仓库作为技术参考和通用能力来源
|
||||
- 新软件作为运转系统业务主线
|
||||
- 通用能力先迁移复用,后续再视项目数量决定是否抽为共享模块
|
||||
|
||||
这是当前阶段风险最低、交付最稳、后续也最容易维护的路径。
|
||||
|
||||
---
|
||||
|
||||
## 3. 当前仓库能力盘点
|
||||
|
||||
### 3.1 已具备且适合复用的通用能力
|
||||
|
||||
结合 `README.md`、`API.md`、`src/main.rs` 以及当前代码结构,现仓库已经具备以下可复用能力。
|
||||
|
||||
#### 3.1.1 数据源与点位接入层
|
||||
|
||||
- OPC UA 数据源管理
|
||||
- 数据源重连
|
||||
- 节点浏览与保存
|
||||
- 点位实时订阅
|
||||
- 点位批量写入
|
||||
- 实时值与质量状态缓存
|
||||
|
||||
对应模块主要包括:
|
||||
|
||||
- `src/connection.rs`
|
||||
- `src/handler/source.rs`
|
||||
- `src/handler/point.rs`
|
||||
- `src/service/source.rs`
|
||||
- `src/service/point.rs`
|
||||
|
||||
这些能力与具体业务无强绑定,适合作为新软件的接入底座。
|
||||
|
||||
#### 3.1.2 设备/点位基础建模
|
||||
|
||||
当前仓库已经有较成熟的基础对象:
|
||||
|
||||
- 数据源 `source`
|
||||
- 设备 `equipment`
|
||||
- 点位 `point`
|
||||
- 标签 `tag`
|
||||
- 页面 `page`
|
||||
|
||||
其中 `equipment` 与 `point` 作为“现场对象抽象”依然有复用价值,尤其适合承载:
|
||||
|
||||
- 门机
|
||||
- 顶车机
|
||||
- 拉引机
|
||||
- 摆渡车
|
||||
- 步进机
|
||||
- 卸砖机
|
||||
- 机械臂状态点
|
||||
- 各工位检测点
|
||||
|
||||
#### 3.1.3 实时事件与前后端通讯
|
||||
|
||||
当前仓库已经具备:
|
||||
|
||||
- WebSocket 实时推送
|
||||
- SSE 日志流
|
||||
- 统一事件入口
|
||||
- 事件落库与查询
|
||||
|
||||
对应模块包括:
|
||||
|
||||
- `src/event.rs`
|
||||
- `src/websocket.rs`
|
||||
- `src/handler/log.rs`
|
||||
|
||||
这些能力对运转系统同样成立,可用于:
|
||||
|
||||
- 动作执行结果推送
|
||||
- 工位状态变化推送
|
||||
- 联锁阻断告警推送
|
||||
- 顺控步骤变化推送
|
||||
- 操作日志与异常追踪
|
||||
|
||||
#### 3.1.4 基础 Web 与后端服务骨架
|
||||
|
||||
当前仓库已经具备:
|
||||
|
||||
- Axum 路由骨架
|
||||
- PostgreSQL 持久化
|
||||
- SQLx 服务层
|
||||
- 中间件
|
||||
- 前端静态页宿主
|
||||
- 单实例运行与托盘集成
|
||||
|
||||
这些都是独立软件首版可直接借鉴或迁移的基础设施。
|
||||
|
||||
### 3.2 当前仓库不适合作为运转系统主模型的部分
|
||||
|
||||
当前控制模型围绕 `control unit` 展开,核心语义是:
|
||||
|
||||
- 定时停止
|
||||
- 定时运行
|
||||
- 累计运行触发下一设备
|
||||
|
||||
这套模型适用于投煤器/布料机这类节拍式设备联动,但不适用于运转系统说明书中的整线逻辑,原因如下:
|
||||
|
||||
- 无法表达“工位占用/空位”驱动的逐段流转
|
||||
- 无法表达“开门 -> 门开确认 -> 主动作 -> 复位 -> 关门 -> 门关确认”的段内动作链
|
||||
- 无法表达摆渡车定位、窑口交接、下摆渡车接车等空间位置过程
|
||||
- 无法表达机械臂安全区互锁
|
||||
- 无法表达动作完成必须由检测闭环确认的控制要求
|
||||
- 无法清晰表达故障停留在当前步骤、禁止跳步恢复的原则
|
||||
|
||||
因此,当前 `unit` 运行态和引擎逻辑可以借鉴实现方法,但不应直接作为运转系统的业务核心模型。
|
||||
|
||||
---
|
||||
|
||||
## 4. 新软件的定位与边界
|
||||
|
||||
### 4.1 新软件定位
|
||||
|
||||
新软件定位为:
|
||||
|
||||
- 面向窑车运转系统的自动控制与监控平台
|
||||
- 以“工位流转 + 执行机构动作 + 联锁条件 + 完成确认 + 异常恢复”为核心
|
||||
- 支持双窑线并行与公共段共享
|
||||
- 支持自动、远程手动、就地切换下的系统行为约束
|
||||
|
||||
### 4.2 与当前仓库的关系
|
||||
|
||||
推荐关系如下:
|
||||
|
||||
- 当前仓库不是运转系统的软件主体
|
||||
- 当前仓库是通用能力来源和技术参考
|
||||
- 首期开发可复制已有通用模块到新项目中使用
|
||||
- 后续如果存在多个同类项目,再考虑把共性抽为共享库
|
||||
|
||||
### 4.3 本次不建议的做法
|
||||
|
||||
不建议:
|
||||
|
||||
- 在当前仓库里继续叠加大量运转系统特有表结构和控制逻辑
|
||||
- 把运转系统说明书中的流程强行映射为现有 `unit` 状态机
|
||||
- 一开始就为了“平台化”设计过度通用的规则解释器
|
||||
|
||||
首版目标应该是:既能落地当前项目需求,又不给后续维护制造过多历史包袱。
|
||||
|
||||
---
|
||||
|
||||
## 5. 推荐的总体架构
|
||||
|
||||
### 5.1 架构分层
|
||||
|
||||
建议将新软件分为四层:
|
||||
|
||||
#### 第一层:接入层
|
||||
|
||||
职责:
|
||||
|
||||
- 对接 OPC UA 或其他现场协议
|
||||
- 订阅实时点位
|
||||
- 写入控制命令
|
||||
- 管理连接状态和信号质量
|
||||
|
||||
该层优先复用当前仓库的实现思路与模块结构。
|
||||
|
||||
#### 第二层:现场对象层
|
||||
|
||||
职责:
|
||||
|
||||
- 管理设备
|
||||
- 管理点位
|
||||
- 管理工位
|
||||
- 管理运输段
|
||||
- 管理信号映射
|
||||
|
||||
该层负责把现场对象标准化,形成“控制引擎可理解的对象模型”。
|
||||
|
||||
#### 第三层:顺控引擎层
|
||||
|
||||
职责:
|
||||
|
||||
- 执行每一段顺控逻辑
|
||||
- 处理段内步骤推进
|
||||
- 校验联锁
|
||||
- 等待闭环确认
|
||||
- 处理超时、故障和恢复
|
||||
- 协调双窑线与公共段
|
||||
|
||||
这是新软件的核心,不适合直接沿用当前仓库的 `unit` 引擎,需要重新设计。
|
||||
|
||||
#### 第四层:操作与监控层
|
||||
|
||||
职责:
|
||||
|
||||
- 实时显示工位状态和设备状态
|
||||
- 展示当前步骤和阻断原因
|
||||
- 提供手动控制与自动启停入口
|
||||
- 展示报警、事件、操作记录和日志
|
||||
|
||||
### 5.2 核心设计思想
|
||||
|
||||
运转系统的控制对象不是“单台设备”,而是“工艺段中的动作序列”。
|
||||
|
||||
因此核心对象应该从“设备”提升到“流程段”。
|
||||
|
||||
建议将整套系统拆成若干独立但可协同的流程段,例如:
|
||||
|
||||
- 回车线入口接车段
|
||||
- 回车线前移段
|
||||
- 前端码车位进车段
|
||||
- 机械臂码坯协同段
|
||||
- 码坯完成放车段
|
||||
- 前端摆渡分配段
|
||||
- 1 号干燥窑进口段
|
||||
- 1 号干燥窑内前移段
|
||||
- 1 号干燥窑出口段
|
||||
- 1 号焙烧窑进口段
|
||||
- 1 号焙烧窑内前移段
|
||||
- 1 号焙烧窑出口段
|
||||
- 2 号线对应各段
|
||||
- 窑尾摆渡接车段
|
||||
- 下摆渡车段
|
||||
- 卸砖线步进段
|
||||
- 卸砖机位协同段
|
||||
|
||||
每个段都遵循统一状态机模板,但参数、设备映射和检测点不同。
|
||||
|
||||
---
|
||||
|
||||
## 6. 领域模型设计
|
||||
|
||||
### 6.1 需要保留的基础对象
|
||||
|
||||
建议保留并延续以下基础对象概念:
|
||||
|
||||
- `source`:现场数据源
|
||||
- `equipment`:执行机构或检测对象
|
||||
- `point`:点位
|
||||
|
||||
但需要扩展出新的运转系统领域对象。
|
||||
|
||||
### 6.2 新增领域对象建议
|
||||
|
||||
#### 6.2.1 工位 `station`
|
||||
|
||||
表示产线上的一个位置或交接位,例如:
|
||||
|
||||
- 码车位
|
||||
- 摆渡车前端接车位
|
||||
- 1 号干燥窑进口位
|
||||
- 1 号干燥窑某中间工位
|
||||
- 焙烧窑出口位
|
||||
- 窑尾摆渡接车位
|
||||
- 卸砖线入口位
|
||||
- 卸砖机位
|
||||
- 回车线入口位
|
||||
|
||||
建议字段:
|
||||
|
||||
- `id`
|
||||
- `code`
|
||||
- `name`
|
||||
- `line_code`
|
||||
- `segment_code`
|
||||
- `station_type`
|
||||
- `enabled`
|
||||
- `description`
|
||||
|
||||
#### 6.2.2 工位信号映射 `station_signal`
|
||||
|
||||
用于将工位和检测信号绑定起来,例如:
|
||||
|
||||
- 有车检测
|
||||
- 空位判定
|
||||
- 到位确认
|
||||
- 允许接车
|
||||
- 完成信号
|
||||
|
||||
#### 6.2.3 流程段 `process_segment`
|
||||
|
||||
表示一个可独立调度的控制段。
|
||||
|
||||
例如:
|
||||
|
||||
- `front_feed_to_load_station`
|
||||
- `robot_palletize`
|
||||
- `front_release_to_transfer_car`
|
||||
- `kiln1_dry_infeed`
|
||||
- `kiln1_dry_step`
|
||||
- `kiln1_dry_outfeed`
|
||||
- `tail_transfer_to_unload`
|
||||
|
||||
建议字段:
|
||||
|
||||
- `id`
|
||||
- `code`
|
||||
- `name`
|
||||
- `segment_type`
|
||||
- `line_code`
|
||||
- `priority`
|
||||
- `enabled`
|
||||
- `mode`
|
||||
|
||||
#### 6.2.4 段步骤 `segment_step`
|
||||
|
||||
表示一个流程段内的顺控步骤。
|
||||
|
||||
建议字段:
|
||||
|
||||
- `id`
|
||||
- `segment_id`
|
||||
- `step_no`
|
||||
- `step_code`
|
||||
- `step_type`
|
||||
- `action_kind`
|
||||
- `timeout_ms`
|
||||
- `next_step_on_success`
|
||||
- `next_step_on_failure`
|
||||
|
||||
#### 6.2.5 联锁规则 `interlock_rule`
|
||||
|
||||
表示动作启动前或运行中的禁止条件、停机条件、允许条件。
|
||||
|
||||
规则类型可包括:
|
||||
|
||||
- 启动允许
|
||||
- 启动禁止
|
||||
- 运行中停机
|
||||
- 完成判定
|
||||
- 复位判定
|
||||
|
||||
#### 6.2.6 动作模板 `action_template`
|
||||
|
||||
用于描述典型动作,减少重复配置,例如:
|
||||
|
||||
- 开门
|
||||
- 关门
|
||||
- 顶车前进
|
||||
- 顶车后退
|
||||
- 拉引启动
|
||||
- 拉引复位
|
||||
- 摆渡车定位到某站
|
||||
- 步进机执行一步
|
||||
- 发放机械臂允许
|
||||
|
||||
#### 6.2.7 段运行态 `segment_runtime`
|
||||
|
||||
替代当前仓库的 `unit runtime`,记录:
|
||||
|
||||
- 当前状态
|
||||
- 当前步骤
|
||||
- 当前阻断原因
|
||||
- 当前动作开始时间
|
||||
- 最近一次成功完成时间
|
||||
- 当前故障锁定状态
|
||||
- 当前人工确认需求
|
||||
|
||||
### 6.3 参数化双窑线
|
||||
|
||||
说明书明确指出 1 号窑线与 2 号窑线控制原则一致,仅设备编号和工位编号不同。
|
||||
|
||||
因此建议设计成:
|
||||
|
||||
- 一套流程段模板
|
||||
- 两套配置实例
|
||||
|
||||
不要写两套硬编码逻辑。参数化维度包括:
|
||||
|
||||
- 设备编号
|
||||
- 站位编号
|
||||
- 门机/顶车机/拉引机映射
|
||||
- 进出口工位
|
||||
- 信号点位绑定
|
||||
|
||||
---
|
||||
|
||||
## 7. 顺控引擎设计
|
||||
|
||||
### 7.1 总体原则
|
||||
|
||||
顺控引擎必须遵循说明书中的原则:
|
||||
|
||||
- 上一步未完成,不能进入下一步
|
||||
- 动作完成不能只依赖时间,必须结合检测反馈
|
||||
- 完全联锁、故障联锁、门位联锁、机械臂联锁优先于普通顺控
|
||||
- 故障后保持当前步骤,不允许强行跳步恢复
|
||||
|
||||
### 7.2 段级状态机
|
||||
|
||||
建议每个 `process_segment` 使用统一状态机框架:
|
||||
|
||||
- `Idle`
|
||||
- `Checking`
|
||||
- `Executing`
|
||||
- `Confirming`
|
||||
- `Resetting`
|
||||
- `Completed`
|
||||
- `Blocked`
|
||||
- `Faulted`
|
||||
- `ManualInterventionRequired`
|
||||
|
||||
这与说明书建议的步骤结构一致,可映射为:
|
||||
|
||||
- Step 0:等待
|
||||
- Step 1:条件检查
|
||||
- Step 2:执行动作
|
||||
- Step 3:到位确认
|
||||
- Step 4:复位确认
|
||||
- Step 5:本段完成
|
||||
- Step 6:故障处理
|
||||
|
||||
### 7.3 统一段内执行模型
|
||||
|
||||
每个段的执行循环建议统一为:
|
||||
|
||||
1. 读取当前段配置
|
||||
2. 读取当前步骤定义
|
||||
3. 拉取关联设备和工位信号实时值
|
||||
4. 校验启动允许与禁止条件
|
||||
5. 发出动作命令
|
||||
6. 等待执行反馈
|
||||
7. 校验完成信号
|
||||
8. 校验复位信号
|
||||
9. 写入本段完成或转入异常状态
|
||||
|
||||
### 7.4 典型动作链模板
|
||||
|
||||
针对窑门相关段,建议内建标准动作模板:
|
||||
|
||||
1. 开门
|
||||
2. 门开到位确认
|
||||
3. 主动作执行
|
||||
4. 主动作完成确认
|
||||
5. 执行机构复位
|
||||
6. 复位确认
|
||||
7. 关门
|
||||
8. 门关到位确认
|
||||
9. 段完成
|
||||
|
||||
这样可以直接匹配说明书中的:
|
||||
|
||||
- 干燥窑进口段
|
||||
- 干燥窑出口段
|
||||
- 焙烧窑进口段
|
||||
- 焙烧窑出口段
|
||||
|
||||
### 7.5 公共段与并行段调度
|
||||
|
||||
整套系统存在以下并发特征:
|
||||
|
||||
- 双窑线并行
|
||||
- 前端码车系统是公共资源
|
||||
- 窑尾摆渡和卸砖线是公共资源
|
||||
- 回车线是公共资源
|
||||
|
||||
因此调度层必须支持:
|
||||
|
||||
- 段独立运行
|
||||
- 公共资源互斥
|
||||
- 基于优先级或空位状态的放行决策
|
||||
- 上下游交接信号驱动
|
||||
|
||||
建议在引擎中增加“资源占用”概念,用于控制:
|
||||
|
||||
- 摆渡车一次只能服务一个目标位
|
||||
- 卸砖机位忙碌时禁止上游送车
|
||||
- 机械臂占用作业区时禁止码车道送车
|
||||
|
||||
---
|
||||
|
||||
## 8. 联锁与异常处理设计
|
||||
|
||||
### 8.1 联锁优先级
|
||||
|
||||
联锁优先级建议按以下顺序处理:
|
||||
|
||||
1. 安全联锁
|
||||
2. 完全联锁
|
||||
3. 故障联锁
|
||||
4. 门位联锁
|
||||
5. 机械臂联锁
|
||||
6. 工艺允许条件
|
||||
7. 普通顺控条件
|
||||
|
||||
高优先级联锁一旦不满足,低优先级不再继续判断。
|
||||
|
||||
### 8.2 通用允许条件
|
||||
|
||||
任何动作启动前至少检查:
|
||||
|
||||
- 目标工位空位
|
||||
- 本工位有车或动作前提成立
|
||||
- 执行机构原位
|
||||
- 相关门位正确
|
||||
- 设备无故障
|
||||
- 安全联锁正常
|
||||
- 信号质量正常
|
||||
- 当前资源未被占用
|
||||
|
||||
### 8.3 通用禁止条件
|
||||
|
||||
出现以下任一情况,应禁止动作启动:
|
||||
|
||||
- 目标工位被占用
|
||||
- 执行机构不在原位
|
||||
- 门未开到位或门未关到位
|
||||
- 摆渡车未定位
|
||||
- 机械臂未退出安全区
|
||||
- 卸砖位忙碌
|
||||
- 信号冲突
|
||||
- 就地模式或远程未就绪
|
||||
|
||||
### 8.4 通用停机条件
|
||||
|
||||
运行中出现以下任一情况,应立刻停止并报警:
|
||||
|
||||
- 设备故障
|
||||
- 动作超时
|
||||
- 门位异常
|
||||
- 定位信号丢失
|
||||
- 安全联锁动作
|
||||
- 检测信号矛盾
|
||||
|
||||
### 8.5 故障恢复原则
|
||||
|
||||
故障处理必须遵循:
|
||||
|
||||
- 故障优先停止当前相关动作
|
||||
- 停机后保留当前步骤
|
||||
- 不允许自动跳到下一步骤
|
||||
- 故障解除后仍需满足复位条件
|
||||
- 对关键信号冲突场景,必须人工确认后恢复
|
||||
|
||||
建议引擎中明确区分:
|
||||
|
||||
- `fault_active`
|
||||
- `fault_latched`
|
||||
- `manual_ack_required`
|
||||
- `blocked_reason`
|
||||
|
||||
---
|
||||
|
||||
## 9. 前后端功能设计
|
||||
|
||||
### 9.1 后端接口建议
|
||||
|
||||
除了基础 `source / equipment / point` 管理接口,新软件建议新增以下业务接口。
|
||||
|
||||
#### 9.1.1 工位与段配置接口
|
||||
|
||||
- `GET /api/station`
|
||||
- `POST /api/station`
|
||||
- `PUT /api/station/{id}`
|
||||
- `DELETE /api/station/{id}`
|
||||
- `GET /api/process-segment`
|
||||
- `POST /api/process-segment`
|
||||
- `PUT /api/process-segment/{id}`
|
||||
- `GET /api/process-segment/{id}/detail`
|
||||
|
||||
#### 9.1.2 运行控制接口
|
||||
|
||||
- `POST /api/control/segment/{id}/start-auto`
|
||||
- `POST /api/control/segment/{id}/stop-auto`
|
||||
- `POST /api/control/segment/{id}/reset`
|
||||
- `POST /api/control/segment/{id}/ack-fault`
|
||||
- `POST /api/control/equipment/{id}/manual-action`
|
||||
|
||||
#### 9.1.3 运行态查询接口
|
||||
|
||||
- `GET /api/runtime/overview`
|
||||
- `GET /api/runtime/segment/{id}`
|
||||
- `GET /api/runtime/station/{id}`
|
||||
- `GET /api/alarm`
|
||||
- `GET /api/event`
|
||||
|
||||
### 9.2 WebSocket 推送建议
|
||||
|
||||
建议至少推送以下消息:
|
||||
|
||||
- 工位状态变化
|
||||
- 段运行态变化
|
||||
- 当前步骤变化
|
||||
- 联锁阻断变化
|
||||
- 报警创建与恢复
|
||||
- 设备动作结果
|
||||
|
||||
### 9.3 前端页面建议
|
||||
|
||||
#### 9.3.1 总览页面
|
||||
|
||||
展示:
|
||||
|
||||
- 整体流程图
|
||||
- 双窑线状态
|
||||
- 公共段状态
|
||||
- 关键堵点
|
||||
- 当前报警数
|
||||
|
||||
#### 9.3.2 运转监控页面
|
||||
|
||||
展示:
|
||||
|
||||
- 各工位有车/空位状态
|
||||
- 门位状态
|
||||
- 顶车机/拉引机/摆渡车/步进机状态
|
||||
- 当前正在执行的步骤
|
||||
- 当前阻断原因
|
||||
|
||||
#### 9.3.3 自动控制页面
|
||||
|
||||
展示:
|
||||
|
||||
- 各流程段自动状态
|
||||
- 步骤进度
|
||||
- 手动启停
|
||||
- 故障确认
|
||||
- 复位操作
|
||||
|
||||
#### 9.3.4 配置页面
|
||||
|
||||
展示:
|
||||
|
||||
- 设备管理
|
||||
- 点位管理
|
||||
- 工位管理
|
||||
- 段配置
|
||||
- 联锁规则配置
|
||||
- 日志与事件查看
|
||||
|
||||
---
|
||||
|
||||
## 10. 与当前仓库的复用策略
|
||||
|
||||
### 10.1 推荐策略
|
||||
|
||||
当前阶段推荐“先迁移复用,再视情况抽公共库”。
|
||||
|
||||
原因:
|
||||
|
||||
- 首版交付更快
|
||||
- 现仓库已有能力已经过实际实现验证
|
||||
- 现在立即抽共享库,容易因为运转系统领域模型尚未稳定而反复修改
|
||||
|
||||
### 10.2 复用方式
|
||||
|
||||
建议按三类处理。
|
||||
|
||||
#### 第一类:直接迁移复用
|
||||
|
||||
可直接迁移并少量改造:
|
||||
|
||||
- OPC UA 接入与重连
|
||||
- 点位订阅与实时缓存
|
||||
- 批量写点
|
||||
- WebSocket 管理
|
||||
- 事件总线
|
||||
- 日志流
|
||||
- 通用工具模块
|
||||
|
||||
#### 第二类:保留设计思路,重新实现
|
||||
|
||||
不建议直接复制,但可以借鉴结构:
|
||||
|
||||
- 控制引擎调度模型
|
||||
- 运行态缓存
|
||||
- 控制前置校验
|
||||
- 事件落库方式
|
||||
|
||||
#### 第三类:不继承旧业务模型
|
||||
|
||||
建议不继承:
|
||||
|
||||
- 当前 `unit` 业务语义
|
||||
- `run_time_sec / stop_time_sec / acc_time_sec / bl_time_sec` 这一套参数
|
||||
- 当前围绕投煤器/布料机定义的设备类型约定
|
||||
|
||||
### 10.3 后续公共化路径
|
||||
|
||||
如果后续存在多个类似项目,可分阶段抽象:
|
||||
|
||||
第一阶段:
|
||||
|
||||
- 新软件独立交付
|
||||
- 复用代码以复制迁移为主
|
||||
|
||||
第二阶段:
|
||||
|
||||
- 抽出 `plc_common` 之类的公共 Rust crate
|
||||
- 抽出前端通用实时组件
|
||||
- 统一数据源接入、事件总线、实时推送和写点命令封装
|
||||
|
||||
第三阶段:
|
||||
|
||||
- 再考虑更高层的流程引擎模板是否值得平台化
|
||||
|
||||
---
|
||||
|
||||
## 11. 实施阶段建议
|
||||
|
||||
### 阶段一:独立项目初始化
|
||||
|
||||
目标:
|
||||
|
||||
- 建立新仓库
|
||||
- 迁移通用接入层和实时通讯层
|
||||
- 跑通基础框架
|
||||
|
||||
工作内容:
|
||||
|
||||
- 初始化新项目结构
|
||||
- 迁移 `source / equipment / point` 基础能力
|
||||
- 跑通 OPC UA 数据源接入
|
||||
- 建立事件、WebSocket、日志链路
|
||||
|
||||
### 阶段二:领域模型与配置能力
|
||||
|
||||
目标:
|
||||
|
||||
- 建立工位、流程段、联锁规则等核心模型
|
||||
|
||||
工作内容:
|
||||
|
||||
- 设计数据库表
|
||||
- 建立后端模型与服务层
|
||||
- 建立配置接口
|
||||
- 搭建配置页面
|
||||
|
||||
### 阶段三:顺控引擎首版
|
||||
|
||||
目标:
|
||||
|
||||
- 跑通一条主流程
|
||||
|
||||
建议先实现:
|
||||
|
||||
- 前端码车位进车
|
||||
- 机械臂码坯协同
|
||||
- 码坯完成放车
|
||||
- 摆渡车前端分配
|
||||
- 2 号干燥窑进口段
|
||||
|
||||
先打通单条链路,验证段模型和联锁模型是否足够。
|
||||
|
||||
### 阶段四:双窑线与公共段扩展
|
||||
|
||||
目标:
|
||||
|
||||
- 完成双窑线、窑尾和回车线全流程
|
||||
|
||||
工作内容:
|
||||
|
||||
- 参数化复制 1 号线和 2 号线
|
||||
- 接入窑尾摆渡、卸砖线、回车线
|
||||
- 完善资源互斥与阻塞处理
|
||||
|
||||
### 阶段五:监控、报警与恢复完善
|
||||
|
||||
目标:
|
||||
|
||||
- 提升可运维性
|
||||
|
||||
工作内容:
|
||||
|
||||
- 完善报警分类
|
||||
- 增加报警确认和恢复记录
|
||||
- 增加动作追踪与步骤历史
|
||||
- 增加现场调试辅助页面
|
||||
|
||||
---
|
||||
|
||||
## 12. 风险与注意事项
|
||||
|
||||
### 12.1 最大风险不是接入,而是流程建模
|
||||
|
||||
当前仓库已经证明接入和基础实时能力不是主要风险。真正的风险在于:
|
||||
|
||||
- 工位划分是否清晰
|
||||
- 流程段切分是否合理
|
||||
- 联锁规则是否完整
|
||||
- 公共资源互斥是否严谨
|
||||
|
||||
因此实现前必须先完成工艺段拆分和信号清单整理。
|
||||
|
||||
### 12.2 不要过早追求全配置化
|
||||
|
||||
首版不要把所有逻辑都做成自由拼装规则引擎。更合理的做法是:
|
||||
|
||||
- 先定义少量固定动作模板
|
||||
- 先定义少量固定规则类型
|
||||
- 用“模板 + 参数”的方式覆盖当前项目
|
||||
|
||||
这样既能交付,也能控制复杂度。
|
||||
|
||||
### 12.3 需要形成明确的 I/O 对照表
|
||||
|
||||
说明书描述了大量逻辑关系,但软件落地前必须补足:
|
||||
|
||||
- 每个工位对应哪些检测点
|
||||
- 每个执行机构对应哪些命令点和反馈点
|
||||
- 每个完成条件由哪些信号组成
|
||||
- 哪些联锁是硬联锁,哪些是软联锁
|
||||
|
||||
没有这份映射表,软件方案再完整也无法落地。
|
||||
|
||||
---
|
||||
|
||||
## 13. 最终建议
|
||||
|
||||
综合当前需求和现仓库现状,最终建议如下:
|
||||
|
||||
- 运转系统按独立软件建设
|
||||
- 当前 `plc_control` 仓库只复用其通用基础能力
|
||||
- 业务核心模型改为“工位 + 流程段 + 步骤 + 联锁 + 完成确认”
|
||||
- 双窑线按模板参数化实现
|
||||
- 首版采用“先迁移复用、后沉淀公共模块”的策略
|
||||
|
||||
这样做可以同时满足三件事:
|
||||
|
||||
- 不破坏当前仓库已经成型的业务语义
|
||||
- 能完整承接运转系统说明书中的实际工艺需求
|
||||
- 为后续形成公共能力保留演进空间
|
||||
|
||||
---
|
||||
|
||||
## 14. 后续建议产出物
|
||||
|
||||
在本方案基础上,建议下一步继续形成以下文档:
|
||||
|
||||
- 运转系统软件功能清单
|
||||
- 工位与执行机构 I/O 对照表
|
||||
- 流程段拆分表
|
||||
- 联锁规则明细表
|
||||
- 数据库表设计文档
|
||||
- 顺控引擎详细设计
|
||||
- 前端页面原型
|
||||
|
||||
如果继续推进实施,下一份最关键的文档应是:
|
||||
|
||||
- 《运转系统流程段与 I/O 建模设计》
|
||||
|
||||
这份文档将直接决定后端模型和顺控引擎如何落地。
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
pub mod command;
|
||||
pub mod engine;
|
||||
pub mod runtime;
|
||||
pub mod simulate;
|
||||
pub mod validator;
|
||||
|
||||
use crate::telemetry::{DataValue, PointMonitorInfo};
|
||||
|
||||
pub(crate) fn monitor_value_as_bool(monitor: &PointMonitorInfo) -> bool {
|
||||
match monitor.value.as_ref() {
|
||||
Some(DataValue::Bool(v)) => *v,
|
||||
Some(DataValue::Int(v)) => *v != 0,
|
||||
Some(DataValue::UInt(v)) => *v != 0,
|
||||
Some(DataValue::Float(v)) => *v != 0.0,
|
||||
Some(DataValue::Text(v)) => {
|
||||
matches!(v.trim().to_ascii_lowercase().as_str(), "1" | "true" | "on" | "yes")
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
use axum::{
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
use crate::util::response::ApiErr;
|
||||
|
||||
pub async fn get_api_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
let content = tokio::fs::read_to_string("API.md")
|
||||
.await
|
||||
.map_err(|err| {
|
||||
tracing::error!("Failed to read API.md: {}", err);
|
||||
ApiErr::NotFound("API.md not found".to_string(), None)
|
||||
})?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("text/markdown; charset=utf-8"),
|
||||
);
|
||||
|
||||
Ok((StatusCode::OK, headers, content))
|
||||
}
|
||||
|
||||
pub async fn get_readme_md() -> Result<impl IntoResponse, ApiErr> {
|
||||
let content = tokio::fs::read_to_string("README.md")
|
||||
.await
|
||||
.map_err(|err| {
|
||||
tracing::error!("Failed to read README.md: {}", err);
|
||||
ApiErr::NotFound("README.md not found".to_string(), None)
|
||||
})?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
header::CONTENT_TYPE,
|
||||
HeaderValue::from_static("text/markdown; charset=utf-8"),
|
||||
);
|
||||
|
||||
Ok((StatusCode::OK, headers, content))
|
||||
}
|
||||
414
src/main.rs
414
src/main.rs
|
|
@ -1,414 +0,0 @@
|
|||
#![cfg_attr(all(windows, not(debug_assertions)), windows_subsystem = "windows")]
|
||||
|
||||
mod control;
|
||||
mod config;
|
||||
mod connection;
|
||||
mod db;
|
||||
mod event;
|
||||
mod handler;
|
||||
mod middleware;
|
||||
mod model;
|
||||
mod service;
|
||||
mod telemetry;
|
||||
mod util;
|
||||
mod websocket;
|
||||
use axum::{
|
||||
routing::{get, post, put},
|
||||
Router,
|
||||
};
|
||||
use config::AppConfig;
|
||||
use connection::ConnectionManager;
|
||||
use db::init_database;
|
||||
use event::EventManager;
|
||||
use middleware::simple_logger;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::mpsc;
|
||||
use axum::{extract::Request, middleware::Next, response::Response};
|
||||
use tower_http::cors::{Any, CorsLayer};
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
async fn no_cache(req: Request, next: Next) -> Response {
|
||||
let mut res = next.run(req).await;
|
||||
res.headers_mut().insert(
|
||||
axum::http::header::CACHE_CONTROL,
|
||||
axum::http::HeaderValue::from_static("no-store"),
|
||||
);
|
||||
res
|
||||
}
|
||||
|
||||
/// Shared application state cloned into every axum handler.
#[derive(Clone)]
pub struct AppState {
    // Environment-derived application configuration.
    pub config: AppConfig,
    // PostgreSQL connection pool (sqlx).
    pub pool: sqlx::PgPool,
    // Manages data-source connections and background reconnection.
    pub connection_manager: Arc<ConnectionManager>,
    // Central event manager; persists events and, when configured,
    // fans them out over WebSocket (see construction in `main`).
    pub event_manager: Arc<EventManager>,
    // Tracks connected WebSocket clients for realtime push.
    pub ws_manager: Arc<websocket::WebSocketManager>,
    // In-memory runtime store used by the control engine.
    pub control_runtime: Arc<control::runtime::ControlRuntimeStore>,
}
|
||||
/// Application entry point: enforces single-instance, loads config, connects
/// the database and all enabled data sources, starts the control engine,
/// wires the tray icon and Ctrl+C handling, then serves HTTP until a
/// shutdown signal arrives.
#[tokio::main]
async fn main() {
    // Load `.env` (optional) and set up logging before anything else.
    dotenv::dotenv().ok();
    util::log::init_logger();
    // Single-instance guard: bail out early if another gateway is running.
    // The guard must stay alive for the whole process, hence the binding.
    let _single_instance = match util::single_instance::try_acquire("PLCControl.Gateway") {
        Ok(guard) => guard,
        Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {
            tracing::warn!("Another PLC Control instance is already running");
            return;
        }
        Err(err) => {
            tracing::error!("Failed to initialize single instance guard: {}", err);
            return;
        }
    };

    let config = AppConfig::from_env().expect("Failed to load configuration");
    let pool = init_database(&config.database_url)
        .await
        .expect("Failed to initialize database");

    // Wiring order matters: the event manager is built from a clone of the
    // connection manager, then handed back to it before the manager is
    // frozen behind an Arc.
    let mut connection_manager = ConnectionManager::new();
    let ws_manager = Arc::new(websocket::WebSocketManager::new());
    let event_manager = Arc::new(EventManager::new(
        pool.clone(),
        Arc::new(connection_manager.clone()),
        Some(ws_manager.clone()),
    ));
    connection_manager.set_event_manager(event_manager.clone());
    connection_manager.set_pool_and_start_reconnect_task(Arc::new(pool.clone()));

    let connection_manager = Arc::new(connection_manager);
    let control_runtime = Arc::new(control::runtime::ControlRuntimeStore::new());

    // Connect to all enabled sources concurrently
    let sources = service::get_all_enabled_sources(&pool)
        .await
        .expect("Failed to fetch sources");

    // Spawn a task for each source to connect and subscribe concurrently
    let mut tasks = Vec::new();
    for source in sources {
        let cm = connection_manager.clone();
        let p = pool.clone();
        let source_name = source.name.clone();
        let source_id = source.id;

        let task = tokio::spawn(async move {
            // Connection failures are logged but do not abort startup;
            // the reconnect task started above retries later.
            if let Err(e) = cm.connect_from_source(&p, source_id).await {
                tracing::error!("Failed to connect to source {}: {}", source_name, e);
            }
        });

        tasks.push(task);
    }

    // Wait for all connection tasks to complete
    for task in tasks {
        if let Err(e) = task.await {
            tracing::error!("Source connection task failed: {:?}", e);
        }
    }

    let state = AppState {
        config: config.clone(),
        pool,
        connection_manager: connection_manager.clone(),
        event_manager,
        ws_manager,
        control_runtime: control_runtime.clone(),
    };
    // Start the control engine, plus the PLC simulator when enabled by config.
    control::engine::start(state.clone(), control_runtime);
    if config.simulate_plc {
        control::simulate::start(state.clone());
    }
    let app = build_router(state.clone());
    let addr = format!("{}:{}", config.server_host, config.server_port);
    tracing::info!("Starting server at http://{}", addr);
    let listener = tokio::net::TcpListener::bind(addr).await.unwrap();

    // Shutdown channel shared by the tray "Exit" action and Ctrl+C.
    let ui_url = format!("http://{}:{}/ui", "localhost", config.server_port);
    let (shutdown_tx, mut shutdown_rx) = mpsc::channel::<()>(1);
    let shutdown_tx_ctrl = shutdown_tx.clone();
    let rt_handle = tokio::runtime::Handle::current();
    init_tray(ui_url, shutdown_tx.clone(), rt_handle);

    let connection_manager_for_shutdown = connection_manager.clone();
    tokio::spawn(async move {
        tokio::signal::ctrl_c()
            .await
            .expect("Failed to install Ctrl+C handler");
        let _ = shutdown_tx_ctrl.send(()).await;
    });

    // Graceful shutdown: drain the signal channel, then close every
    // data-source connection before letting axum stop.
    let shutdown_signal = async move {
        let _ = shutdown_rx.recv().await;
        tracing::info!("Received shutdown signal, closing all connections...");
        connection_manager_for_shutdown.disconnect_all().await;
        tracing::info!("All connections closed");
    };

    axum::serve(listener, app)
        .with_graceful_shutdown(shutdown_signal)
        .await
        .unwrap();
}
|
||||
|
||||
/// Build the full axum router: REST API routes, the static `/ui` frontend
/// (served with no-store caching), WebSocket endpoints, request logging and
/// a permissive CORS layer. `state` is shared with every handler.
fn build_router(state: AppState) -> Router {
    let all_route = Router::new()
        // --- Data source management ---
        .route(
            "/api/source",
            get(handler::source::get_source_list).post(handler::source::create_source),
        )
        .route(
            "/api/source/{source_id}",
            axum::routing::delete(handler::source::delete_source)
                .put(handler::source::update_source),
        )
        .route(
            "/api/source/{source_id}/reconnect",
            axum::routing::post(handler::source::reconnect_source),
        )
        .route(
            "/api/source/{source_id}/browse",
            axum::routing::post(handler::source::browse_and_save_nodes),
        )
        .route(
            "/api/source/{source_id}/node-tree",
            get(handler::source::get_node_tree),
        )
        // --- Point management (single + batch) ---
        .route("/api/point", get(handler::point::get_point_list))
        .route(
            "/api/point/value/batch",
            axum::routing::post(handler::point::batch_set_point_value),
        )
        .route(
            "/api/point/batch",
            axum::routing::post(handler::point::batch_create_points)
                .delete(handler::point::batch_delete_points),
        )
        .route(
            "/api/point/{point_id}/history",
            get(handler::point::get_point_history),
        )
        .route(
            "/api/point/{point_id}",
            get(handler::point::get_point)
                .put(handler::point::update_point)
                .delete(handler::point::delete_point),
        )
        .route(
            "/api/point/batch/set-tags",
            put(handler::point::batch_set_point_tags),
        )
        .route(
            "/api/point/batch/set-equipment",
            put(handler::point::batch_set_point_equipment),
        )
        // --- Equipment management ---
        .route(
            "/api/equipment",
            get(handler::equipment::get_equipment_list).post(handler::equipment::create_equipment),
        )
        .route(
            "/api/equipment/{equipment_id}",
            get(handler::equipment::get_equipment)
                .put(handler::equipment::update_equipment)
                .delete(handler::equipment::delete_equipment),
        )
        .route(
            "/api/equipment/batch/set-unit",
            put(handler::equipment::batch_set_equipment_unit),
        )
        .route(
            "/api/equipment/{equipment_id}/points",
            get(handler::equipment::get_equipment_points),
        )
        // --- Control units and events ---
        .route(
            "/api/unit",
            get(handler::control::get_unit_list).post(handler::control::create_unit),
        )
        .route(
            "/api/unit/{unit_id}",
            get(handler::control::get_unit)
                .put(handler::control::update_unit)
                .delete(handler::control::delete_unit),
        )
        .route(
            "/api/event",
            get(handler::control::get_event_list),
        )
        // --- Runtime control commands ---
        .route(
            "/api/control/equipment/{equipment_id}/start",
            post(handler::control::start_equipment),
        )
        .route(
            "/api/control/equipment/{equipment_id}/stop",
            post(handler::control::stop_equipment),
        )
        .route(
            "/api/control/unit/{unit_id}/start-auto",
            post(handler::control::start_auto_unit),
        )
        .route(
            "/api/control/unit/{unit_id}/stop-auto",
            post(handler::control::stop_auto_unit),
        )
        .route(
            "/api/control/unit/batch-start-auto",
            post(handler::control::batch_start_auto),
        )
        .route(
            "/api/control/unit/batch-stop-auto",
            post(handler::control::batch_stop_auto),
        )
        .route(
            "/api/control/unit/{unit_id}/ack-fault",
            post(handler::control::ack_fault_unit),
        )
        .route(
            "/api/unit/{unit_id}/runtime",
            get(handler::control::get_unit_runtime),
        )
        .route(
            "/api/unit/{unit_id}/detail",
            get(handler::control::get_unit_detail),
        )
        // --- Tags and pages ---
        .route(
            "/api/tag",
            get(handler::tag::get_tag_list).post(handler::tag::create_tag),
        )
        .route(
            "/api/tag/{tag_id}",
            get(handler::tag::get_tag_points)
                .put(handler::tag::update_tag)
                .delete(handler::tag::delete_tag),
        )
        .route(
            "/api/page",
            get(handler::page::get_page_list).post(handler::page::create_page),
        )
        .route(
            "/api/page/{page_id}",
            get(handler::page::get_page)
                .put(handler::page::update_page)
                .delete(handler::page::delete_page),
        )
        // --- Logs and embedded documentation ---
        .route("/api/logs", get(handler::log::get_logs))
        .route("/api/logs/stream", get(handler::log::stream_logs))
        .route("/api/docs/api-md", get(handler::doc::get_api_md))
        .route("/api/docs/readme-md", get(handler::doc::get_readme_md));

    Router::new()
        .merge(all_route)
        // Static frontend under /ui, always served uncached.
        .nest(
            "/ui",
            Router::new()
                .fallback_service(ServeDir::new("web").append_index_html_on_directories(true))
                .layer(axum::middleware::from_fn(no_cache)),
        )
        // WebSocket endpoints for realtime push.
        .route("/ws/public", get(websocket::public_websocket_handler))
        .route(
            "/ws/client/{client_id}",
            get(websocket::client_websocket_handler),
        )
        .layer(axum::middleware::from_fn(simple_logger))
        // NOTE(review): CORS is fully open (any origin/method/header) —
        // acceptable for a LAN gateway, but confirm before exposing publicly.
        .layer(
            CorsLayer::new()
                .allow_origin(Any)
                .allow_methods(Any)
                .allow_headers(Any),
        )
        .with_state(state)
}
|
||||
|
||||
/// Windows-only: start the system tray icon on a dedicated OS thread so the
/// tray's event loop never blocks the tokio runtime. Failures are logged and
/// otherwise ignored — the gateway runs fine without a tray icon.
#[cfg(windows)]
fn init_tray(ui_url: String, shutdown_tx: mpsc::Sender<()>, rt_handle: tokio::runtime::Handle) {
    std::thread::spawn(move || {
        match tray::run_tray(ui_url, shutdown_tx, rt_handle) {
            Ok(()) => {}
            Err(e) => tracing::warn!("Tray init failed: {}", e),
        }
    });
}
|
||||
|
||||
/// Non-Windows stub: tray integration is Windows-only, so this is a no-op.
#[cfg(not(windows))]
fn init_tray(_ui_url: String, _shutdown_tx: mpsc::Sender<()>, _rt_handle: tokio::runtime::Handle) {}
|
||||
|
||||
/// Windows system-tray integration: a minimal winit event loop hosting a
/// tray icon with "Open UI" and "Exit" menu items.
#[cfg(windows)]
mod tray {
    use std::error::Error;
    use tokio::sync::mpsc;
    use tray_icon::{
        menu::{Menu, MenuEvent, MenuItem},
        Icon, TrayIconBuilder,
    };
    use winit::application::ApplicationHandler;
    use winit::event_loop::{ActiveEventLoop, ControlFlow, EventLoop};
    use winit::platform::windows::EventLoopBuilderExtWindows;

    /// Build the tray icon + menu and run the event loop until "Exit" is
    /// chosen. Blocks the calling thread (spawned by `init_tray`).
    ///
    /// * `ui_url` — browser URL opened by the "Open UI" item.
    /// * `shutdown_tx` — signals the main server to shut down on "Exit".
    /// * `rt_handle` — tokio handle used to send on the async channel from
    ///   this non-async thread.
    pub fn run_tray(
        ui_url: String,
        shutdown_tx: mpsc::Sender<()>,
        rt_handle: tokio::runtime::Handle,
    ) -> Result<(), Box<dyn Error>> {
        // with_any_thread(true): allow the event loop off the main thread
        // (Windows-specific extension; we run on a spawned thread).
        let mut builder = EventLoop::builder();
        builder.with_any_thread(true);
        let event_loop = builder.build()?;

        let menu = Menu::new();
        let open_item = MenuItem::new("Open UI", true, None);
        let exit_item = MenuItem::new("Exit", true, None);
        menu.append(&open_item)?;
        menu.append(&exit_item)?;

        // 1x1 solid-color RGBA placeholder icon.
        let icon = Icon::from_rgba(vec![0, 120, 212, 255], 1, 1)?;
        // The tray icon must be kept alive for the loop's lifetime.
        let _tray = TrayIconBuilder::new()
            .with_tooltip("PLC Control")
            .with_menu(Box::new(menu))
            .with_icon(icon)
            .build()?;

        let menu_rx = MenuEvent::receiver();
        let mut app = TrayApp {
            menu_rx,
            open_id: open_item.id().clone(),
            exit_id: exit_item.id().clone(),
            ui_url,
            shutdown_tx,
            rt_handle,
        };

        event_loop.run_app(&mut app).map_err(|e| e.into())
    }

    /// winit application handler that polls tray menu events.
    struct TrayApp {
        // Global receiver for tray menu clicks.
        menu_rx: &'static tray_icon::menu::MenuEventReceiver,
        // Menu item ids used to tell "Open UI" and "Exit" apart.
        open_id: tray_icon::menu::MenuId,
        exit_id: tray_icon::menu::MenuId,
        ui_url: String,
        shutdown_tx: mpsc::Sender<()>,
        rt_handle: tokio::runtime::Handle,
    }

    impl ApplicationHandler for TrayApp {
        // No windows are created; these callbacks are intentionally empty.
        fn resumed(&mut self, _event_loop: &ActiveEventLoop) {}

        fn window_event(
            &mut self,
            _event_loop: &ActiveEventLoop,
            _window_id: winit::window::WindowId,
            _event: winit::event::WindowEvent,
        ) {
        }

        // Called when the loop idles; drain pending menu events here.
        fn about_to_wait(&mut self, event_loop: &ActiveEventLoop) {
            event_loop.set_control_flow(ControlFlow::Wait);
            while let Ok(menu_event) = self.menu_rx.try_recv() {
                if menu_event.id == self.open_id {
                    // Best-effort: ignore browser-open failures.
                    let _ = webbrowser::open(&self.ui_url);
                }
                if menu_event.id == self.exit_id {
                    // Signal shutdown over the async channel, then stop
                    // the tray event loop itself.
                    let _ = self.rt_handle.block_on(self.shutdown_tx.send(()));
                    event_loop.exit();
                }
            }
        }
    }
}
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
<!-- API documentation drawer: hidden slide-over panel that renders API.md
     alongside a generated table of contents. Visibility and content loading
     are driven by script (ids: apiDocDrawer, apiDocToc, apiDocContent). -->
<div class="drawer-backdrop hidden" id="apiDocDrawer">
  <aside class="drawer api-drawer" role="dialog" aria-modal="true" aria-labelledby="apiDocTitle">
    <div class="drawer-head">
      <h3 id="apiDocTitle">API.md</h3>
      <button type="button" class="secondary" id="closeApiDoc">关闭</button>
    </div>
    <div class="drawer-body">
      <aside class="doc-toc">
        <div class="doc-toc-title">目录</div>
        <!-- Placeholder text replaced once the document is fetched. -->
        <div class="doc-toc-list" id="apiDocToc">加载中...</div>
      </aside>
      <div class="markdown-doc" id="apiDocContent">加载中...</div>
    </div>
  </aside>
</div>
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
<section class="panel bottom-right">
|
||||
<div class="panel-head">
|
||||
<h2 id="chartTitle">点位曲线</h2>
|
||||
<button class="secondary" id="refreshChart">刷新</button>
|
||||
</div>
|
||||
<div class="chart-panel">
|
||||
<div class="muted" id="chartSummary">点击上方点位表中的一行查看曲线</div>
|
||||
<canvas id="chartCanvas" class="chart-canvas" width="820" height="320"></canvas>
|
||||
</div>
|
||||
</section>
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
<section class="panel top-left">
|
||||
<div class="panel-head">
|
||||
<h2>设备</h2>
|
||||
<button type="button" id="newEquipmentBtn">+ 新增</button>
|
||||
</div>
|
||||
<div class="toolbar equipment-toolbar">
|
||||
<input id="equipmentKeyword" placeholder="搜索编码或名称" />
|
||||
<button type="button" class="secondary" id="refreshEquipmentBtn">刷新</button>
|
||||
</div>
|
||||
<div class="toolbar equipment-batch-toolbar">
|
||||
<div class="muted" id="selectedEquipmentSummary">已选 0 台设备</div>
|
||||
<select id="equipmentBatchUnitId"></select>
|
||||
<button type="button" class="secondary" id="clearEquipmentSelectionBtn">清空选择</button>
|
||||
<button type="button" id="applyEquipmentUnitBtn">批量设单元</button>
|
||||
</div>
|
||||
<div class="list equipment-list" id="equipmentList"></div>
|
||||
</section>
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
<section class="panel bottom-mid">
|
||||
<div class="panel-head">
|
||||
<h2>实时日志</h2>
|
||||
</div>
|
||||
<div class="log" id="logView"></div>
|
||||
</section>
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
<section class="panel ops-bottom">
|
||||
<div class="panel-head">
|
||||
<h2>系统事件</h2>
|
||||
<button type="button" class="secondary" id="refreshEventBtn">刷新</button>
|
||||
</div>
|
||||
<div class="list event-list" id="eventList"></div>
|
||||
</section>
|
||||
|
|
@ -1,187 +0,0 @@
|
|||
<div class="modal hidden" id="unitModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>控制单元配置</h3>
|
||||
<button class="secondary" id="closeUnitModal">X</button>
|
||||
</div>
|
||||
<form id="unitForm" class="form">
|
||||
<input type="hidden" id="unitId" />
|
||||
<label>
|
||||
编码
|
||||
<input id="unitCode" required />
|
||||
</label>
|
||||
<label>
|
||||
名称
|
||||
<input id="unitName" required />
|
||||
</label>
|
||||
<label>
|
||||
说明
|
||||
<input id="unitDescription" />
|
||||
</label>
|
||||
<label class="check-row">
|
||||
<input type="checkbox" id="unitEnabled" checked />
|
||||
<span>启用</span>
|
||||
</label>
|
||||
<label>
|
||||
投煤运行时间(秒)
|
||||
<input id="unitRunTimeSec" type="number" min="0" value="0" />
|
||||
</label>
|
||||
<label>
|
||||
投煤停止时间(秒)
|
||||
<input id="unitStopTimeSec" type="number" min="0" value="0" />
|
||||
</label>
|
||||
<label>
|
||||
投煤累计阈值(秒)
|
||||
<input id="unitAccTimeSec" type="number" min="0" value="0" />
|
||||
</label>
|
||||
<label>
|
||||
布料机运行时间(秒)
|
||||
<input id="unitBlTimeSec" type="number" min="0" value="0" />
|
||||
</label>
|
||||
<label class="check-row">
|
||||
<input type="checkbox" id="unitManualAck" checked />
|
||||
<span>故障恢复后需人工确认</span>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="unitReset">清空</button>
|
||||
<button type="submit" id="unitSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="equipmentModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>设备配置</h3>
|
||||
<button class="secondary" id="closeEquipmentModal">X</button>
|
||||
</div>
|
||||
<form id="equipmentForm" class="form">
|
||||
<input type="hidden" id="equipmentId" />
|
||||
<label>
|
||||
所属单元
|
||||
<select id="equipmentUnitId"></select>
|
||||
</label>
|
||||
<label>
|
||||
编码
|
||||
<input id="equipmentCode" required />
|
||||
</label>
|
||||
<label>
|
||||
名称
|
||||
<input id="equipmentName" required />
|
||||
</label>
|
||||
<label>
|
||||
类型
|
||||
<select id="equipmentKind"></select>
|
||||
</label>
|
||||
<label>
|
||||
说明
|
||||
<input id="equipmentDescription" />
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="equipmentReset">清空</button>
|
||||
<button type="submit" id="equipmentSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointModal">
|
||||
<div class="modal-content">
|
||||
<div class="modal-head">
|
||||
<h3>选择节点创建点位</h3>
|
||||
<button class="secondary" id="closeModal">X</button>
|
||||
</div>
|
||||
<div class="toolbar">
|
||||
<select id="pointSourceSelect"></select>
|
||||
<div class="muted" id="pointSourceNodeCount">Nodes: 0</div>
|
||||
<button id="browseNodes">加载节点</button>
|
||||
<button class="secondary" id="refreshTree">刷新树</button>
|
||||
</div>
|
||||
<div class="tree" id="nodeTree"></div>
|
||||
<div class="modal-foot">
|
||||
<div class="muted" id="selectedCount">已选中 0 个节点</div>
|
||||
<button id="createPoints">创建设备点位</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="sourceModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>Source 配置</h3>
|
||||
<button class="secondary" id="closeSourceModal">X</button>
|
||||
</div>
|
||||
<form id="sourceForm" class="form">
|
||||
<input type="hidden" id="sourceId" />
|
||||
<label>
|
||||
名称
|
||||
<input id="sourceName" required />
|
||||
</label>
|
||||
<label>
|
||||
Endpoint
|
||||
<input id="sourceEndpoint" placeholder="opc.tcp://host:port" required />
|
||||
</label>
|
||||
<label class="check-row">
|
||||
<input type="checkbox" id="sourceEnabled" checked />
|
||||
<span>启用</span>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="sourceReset">清空</button>
|
||||
<button type="submit" id="sourceSubmit">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="pointBindingModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>绑定点位</h3>
|
||||
<button class="secondary" id="closePointBindingModal">X</button>
|
||||
</div>
|
||||
<form id="pointBindingForm" class="form">
|
||||
<input type="hidden" id="bindingPointId" />
|
||||
<label>
|
||||
点位
|
||||
<input id="bindingPointName" disabled />
|
||||
</label>
|
||||
<label>
|
||||
设备
|
||||
<select id="bindingEquipmentId"></select>
|
||||
</label>
|
||||
<label>
|
||||
角色模板
|
||||
<select id="bindingSignalRole"></select>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="clearPointBinding">清空绑定</button>
|
||||
<button type="submit" id="savePointBinding">保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="modal hidden" id="batchBindingModal">
|
||||
<div class="modal-content modal-sm">
|
||||
<div class="modal-head">
|
||||
<h3>批量绑定点位</h3>
|
||||
<button class="secondary" id="closeBatchBindingModal">X</button>
|
||||
</div>
|
||||
<form id="batchBindingForm" class="form">
|
||||
<div class="muted" id="batchBindingSummary">已选中 0 个点位</div>
|
||||
<label>
|
||||
设备
|
||||
<select id="batchBindingEquipmentId"></select>
|
||||
</label>
|
||||
<label>
|
||||
角色模板
|
||||
<select id="batchBindingSignalRole"></select>
|
||||
</label>
|
||||
<div class="form-actions">
|
||||
<button type="button" class="secondary" id="clearBatchBinding">清空设备和角色</button>
|
||||
<button type="submit" id="saveBatchBinding">批量保存</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
<section class="panel ops-main">
|
||||
<div class="ops-layout">
|
||||
<aside class="ops-unit-sidebar">
|
||||
<div class="panel-head">
|
||||
<h2>控制单元</h2>
|
||||
<div class="ops-batch-actions">
|
||||
<button type="button" class="secondary" id="batchStartAutoBtn" title="启动所有未锁定单元的自动控制">全部启动</button>
|
||||
<button type="button" class="danger" id="batchStopAutoBtn" title="停止所有单元的自动控制">全部停止</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="list ops-unit-list" id="opsUnitList"></div>
|
||||
</aside>
|
||||
<div class="ops-equipment-area" id="opsEquipmentArea">
|
||||
<div class="muted ops-placeholder">← 选择控制单元</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
<section class="panel top-right">
|
||||
<div class="panel-head">
|
||||
<h2>点位</h2>
|
||||
<div class="pager">
|
||||
<button class="secondary" id="prevPoints" title="上一页">‹</button>
|
||||
<span id="pointsPageInfo">1 / 1</span>
|
||||
<button class="secondary" id="nextPoints" title="下一页">›</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="toolbar point-batch-toolbar">
|
||||
<label class="check-row compact-check">
|
||||
<input type="checkbox" id="toggleAllPoints" />
|
||||
<span>本页全选</span>
|
||||
</label>
|
||||
<div class="muted" id="pointFilterSummary">当前筛选: 全部点位</div>
|
||||
<div class="muted" id="selectedPointCount">已选中 0 个点位</div>
|
||||
<button type="button" class="secondary" id="openPointModal">选入节点</button>
|
||||
<button type="button" class="secondary" id="openBatchBinding">批量绑定设备</button>
|
||||
<button type="button" class="secondary" id="clearSelectedPoints">清空选择</button>
|
||||
</div>
|
||||
<div class="table-wrap">
|
||||
<table class="data-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style="width:6%"></th>
|
||||
<th style="width:22%">名称</th>
|
||||
<th style="width:16%">值</th>
|
||||
<th style="width:10%">质量</th>
|
||||
<th style="width:18%">设备/角色</th>
|
||||
<th style="width:21%">更新时间</th>
|
||||
<th style="width:120px"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="pointList"></tbody>
|
||||
</table>
|
||||
</div>
|
||||
</section>
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
<section class="panel bottom-left">
|
||||
<div class="stack-panel">
|
||||
<div class="stack-section">
|
||||
<div class="panel-head">
|
||||
<h2>控制单元</h2>
|
||||
<div class="toolbar">
|
||||
<button type="button" class="secondary" id="refreshUnitBtn">刷新</button>
|
||||
<button type="button" id="newUnitBtn">+ 新增</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="list unit-list" id="unitList"></div>
|
||||
</div>
|
||||
|
||||
<div class="stack-section stack-section-bordered">
|
||||
<div class="panel-head">
|
||||
<h2>数据源</h2>
|
||||
<button type="button" id="openSourceForm">+ 新增</button>
|
||||
</div>
|
||||
<div class="source-panels" id="sourceList"></div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
|
|
@ -1,15 +0,0 @@
|
|||
<header class="topbar">
|
||||
<div class="title">PLC Control</div>
|
||||
<div class="tab-bar">
|
||||
<button type="button" class="tab-btn active" id="tabOps">运维</button>
|
||||
<button type="button" class="tab-btn" id="tabConfig">配置</button>
|
||||
</div>
|
||||
<div class="topbar-actions">
|
||||
<button type="button" class="secondary" id="openReadmeDoc">README.md</button>
|
||||
<button type="button" class="secondary" id="openApiDoc">API.md</button>
|
||||
<div class="status" id="statusText">
|
||||
<span class="ws-dot" id="wsDot"></span>
|
||||
<span id="wsLabel">连接中…</span>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<title>PLC Control</title>
|
||||
<link rel="stylesheet" href="/ui/styles.css?v=20260325f" />
|
||||
</head>
|
||||
<body>
|
||||
<div data-partial="/ui/html/topbar.html"></div>
|
||||
|
||||
<main class="grid-ops">
|
||||
<div data-partial="/ui/html/ops-panel.html"></div>
|
||||
<div data-partial="/ui/html/equipment-panel.html"></div>
|
||||
<div data-partial="/ui/html/points-panel.html"></div>
|
||||
<div data-partial="/ui/html/source-panel.html"></div>
|
||||
<div data-partial="/ui/html/log-stream-panel.html"></div>
|
||||
<div data-partial="/ui/html/logs-panel.html"></div>
|
||||
<div data-partial="/ui/html/chart-panel.html"></div>
|
||||
</main>
|
||||
|
||||
<div data-partial="/ui/html/modals.html"></div>
|
||||
<div data-partial="/ui/html/api-doc-drawer.html"></div>
|
||||
|
||||
<script type="module" src="/ui/js/index.js?v=20260325f"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
import { dom } from "./dom.js";
|
||||
|
||||
/** Write `text` into the top-bar status element. */
export function setStatus(text) {
  const { statusText } = dom;
  statusText.textContent = text;
}
|
||||
|
||||
// ── Toast ─────────────────────────────────────────
|
||||
|
||||
/** Lazily create and return the singleton `#toast-container` element. */
function getContainer() {
  const existing = document.getElementById("toast-container");
  if (existing) {
    return existing;
  }
  const created = document.createElement("div");
  created.id = "toast-container";
  document.body.appendChild(created);
  return created;
}
|
||||
|
||||
const ICONS = { error: "✕", warning: "!", success: "✓", info: "i" };

/**
 * Show a toast notification.
 * @param {string} title main text
 * @param {object} [opts]
 * @param {string} [opts.message] secondary description text
 * @param {"error"|"warning"|"success"|"info"} [opts.level="error"]
 * @param {number} [opts.duration=4000] auto-dismiss delay in ms; 0 disables auto-dismiss
 * @param {boolean} [opts.shake=false] play a shake animation on appearance
 * @returns {{ dismiss: () => void }}
 */
export function showToast(title, { message, level = "error", duration = 4000, shake = false } = {}) {
  const container = getContainer();

  const el = document.createElement("div");
  el.className = `toast ${level}${shake ? " shake" : ""}`;

  // Build the toast with DOM APIs + textContent instead of innerHTML:
  // `title`/`message` often carry server-provided error text (see apiFetch),
  // which must not be interpreted as markup (XSS hardening).
  const icon = document.createElement("span");
  icon.className = "toast-icon";
  icon.textContent = ICONS[level] ?? "i";

  const body = document.createElement("div");
  body.className = "toast-body";

  const titleEl = document.createElement("div");
  titleEl.className = "toast-title";
  titleEl.textContent = title;
  body.appendChild(titleEl);

  if (message) {
    const messageEl = document.createElement("div");
    messageEl.className = "toast-message";
    messageEl.textContent = message;
    body.appendChild(messageEl);
  }

  el.append(icon, body);

  // Fade out, then remove the node once the hide animation finishes.
  const dismiss = () => {
    if (!el.parentNode) return;
    el.classList.remove("shake");
    el.classList.add("hiding");
    el.addEventListener("animationend", () => el.remove(), { once: true });
  };

  el.addEventListener("click", dismiss);
  container.appendChild(el);

  if (duration > 0) setTimeout(dismiss, duration);
  return { dismiss };
}
|
||||
|
||||
// ── apiFetch ──────────────────────────────────────
|
||||
|
||||
/**
 * Thin fetch wrapper for the backend API.
 * Shows an error toast and throws on non-2xx responses; returns `null` for
 * 204, parsed JSON for JSON responses, and raw text otherwise.
 * @param {string} url
 * @param {RequestInit} [options]
 * @throws {Error} with the response body (or statusText) on HTTP failure
 */
export async function apiFetch(url, options = {}) {
  const response = await fetch(url, {
    ...options,
    // Merge caller headers on top of the default instead of letting a
    // caller-supplied `options.headers` replace the whole object — the old
    // spread order silently dropped Content-Type whenever any header was passed.
    headers: { "Content-Type": "application/json", ...options.headers },
  });

  if (!response.ok) {
    const text = (await response.text()) || response.statusText;
    showToast(`请求失败 ${response.status}`, { message: text });
    throw new Error(text);
  }

  // 204 No Content carries no body to parse.
  if (response.status === 204) {
    return null;
  }

  const contentType = response.headers.get("content-type") || "";
  if (contentType.includes("application/json")) {
    return response.json();
  }

  return response.text();
}
|
||||
|
||||
/**
 * Await `task`, routing any rejection into the status bar instead of letting
 * it propagate. Resolves with the task's value on success, `undefined` on error.
 */
export function withStatus(task) {
  return task.catch((error) => {
    const text = error.message || "请求失败";
    setStatus(text);
  });
}
|
||||
195
web/js/app.js
195
web/js/app.js
|
|
@ -1,195 +0,0 @@
|
|||
import { withStatus } from "./api.js";
|
||||
import { openChart, renderChart } from "./chart.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { closeApiDocDrawer, openApiDocDrawer, openReadmeDrawer } from "./docs.js";
|
||||
import { loadEvents } from "./events.js";
|
||||
import {
|
||||
applyBatchEquipmentUnit,
|
||||
clearPointBinding,
|
||||
clearSelectedEquipments,
|
||||
closeEquipmentModal,
|
||||
loadEquipments,
|
||||
openCreateEquipmentModal,
|
||||
resetEquipmentForm,
|
||||
saveEquipment,
|
||||
} from "./equipment.js";
|
||||
import { startPointSocket, startLogs, stopLogs } from "./logs.js";
|
||||
import { startOps, renderOpsUnits, loadAllEquipmentCards } from "./ops.js";
|
||||
import {
|
||||
clearBatchBinding,
|
||||
browseAndLoadTree,
|
||||
clearSelectedPoints,
|
||||
createPoints,
|
||||
loadPoints,
|
||||
loadTree,
|
||||
openBatchBinding,
|
||||
openPointCreateModal,
|
||||
renderSelectedNodes,
|
||||
saveBatchBinding,
|
||||
savePointBinding,
|
||||
updatePointFilterSummary,
|
||||
updateSelectedPointSummary,
|
||||
} from "./points.js";
|
||||
import { state } from "./state.js";
|
||||
import { loadSources, saveSource } from "./sources.js";
|
||||
import { closeUnitModal, loadUnits, openCreateUnitModal, resetUnitForm, renderUnits, saveUnit } from "./units.js";
|
||||
|
||||
// Tracks whether the config view's one-time data load has already run.
let _configLoaded = false;

/**
 * Switch the UI between the "ops" and "config" layouts: swap the grid class,
 * highlight the active tab, show/hide layout-specific panels, and manage the
 * config-view log stream and lazy data load.
 * @param {"ops"|"config"} view
 */
function switchView(view) {
  state.activeView = view;
  const isOps = view === "ops";
  const isConfig = view === "config";

  const main = document.querySelector("main");
  main.className = isOps ? "grid-ops" : "grid-config";

  dom.tabOps.classList.toggle("active", isOps);
  dom.tabConfig.classList.toggle("active", isConfig);

  // Panels that only exist in the config layout.
  for (const cls of ["top-left", "top-right", "bottom-left", "bottom-right"]) {
    main.querySelector(`.panel.${cls}`)?.classList.toggle("hidden", isOps);
  }
  // The log-stream panel is config-only as well.
  main.querySelector(".panel.bottom-mid")?.classList.toggle("hidden", isOps);

  // Panels that only exist in the ops layout.
  main.querySelector(".panel.ops-main")?.classList.toggle("hidden", isConfig);
  main.querySelector(".panel.ops-bottom")?.classList.toggle("hidden", isConfig);

  if (view !== "config") {
    stopLogs();
    return;
  }

  startLogs();
  if (_configLoaded) {
    return;
  }
  _configLoaded = true;
  withStatus(
    (async () => {
      await Promise.all([loadSources(), loadEquipments(), loadEvents()]);
      await loadPoints();
    })(),
  );
}
|
||||
|
||||
// Wire every static DOM control to its handler. Called once from bootstrap().
function bindEvents() {
  // Form submissions (all routed through withStatus so failures land in the status bar).
  dom.unitForm.addEventListener("submit", (event) => withStatus(saveUnit(event)));
  dom.sourceForm.addEventListener("submit", (event) => withStatus(saveSource(event)));
  dom.equipmentForm.addEventListener("submit", (event) => withStatus(saveEquipment(event)));
  dom.pointBindingForm.addEventListener("submit", (event) => withStatus(savePointBinding(event)));
  dom.batchBindingForm.addEventListener("submit", (event) => withStatus(saveBatchBinding(event)));

  // Control-unit panel.
  dom.unitResetBtn.addEventListener("click", resetUnitForm);
  dom.refreshUnitBtn.addEventListener("click", () => withStatus(loadUnits().then(loadEvents)));
  dom.newUnitBtn.addEventListener("click", openCreateUnitModal);
  dom.closeUnitModalBtn.addEventListener("click", closeUnitModal);

  // Equipment panel (single + batch unit assignment).
  dom.sourceResetBtn.addEventListener("click", () => dom.sourceForm.reset());
  dom.equipmentResetBtn.addEventListener("click", resetEquipmentForm);
  dom.refreshEquipmentBtn.addEventListener("click", () => withStatus(loadEquipments()));
  dom.newEquipmentBtn.addEventListener("click", openCreateEquipmentModal);
  dom.closeEquipmentModalBtn.addEventListener("click", closeEquipmentModal);
  dom.applyEquipmentUnitBtn.addEventListener("click", () => withStatus(applyBatchEquipmentUnit()));
  dom.clearEquipmentSelectionBtn.addEventListener("click", clearSelectedEquipments);

  // Point-creation modal: changing the source invalidates the loaded node tree.
  dom.openPointModalBtn.addEventListener("click", openPointCreateModal);
  dom.pointSourceSelect.addEventListener("change", () => {
    dom.nodeTree.innerHTML = '<div class="muted">Click "Load Nodes" to fetch node tree</div>';
    dom.pointSourceNodeCount.textContent = "Nodes: 0";
  });
  dom.browseNodesBtn.addEventListener("click", () => withStatus(browseAndLoadTree()));
  dom.refreshTreeBtn.addEventListener("click", () => withStatus(loadTree()));
  dom.createPointsBtn.addEventListener("click", () => withStatus(createPoints()));
  dom.closeModalBtn.addEventListener("click", () => dom.pointModal.classList.add("hidden"));

  // Source modal: opening it always starts from a blank form.
  dom.openSourceFormBtn.addEventListener("click", () => {
    dom.sourceForm.reset();
    dom.sourceId.value = "";
    dom.sourceModal.classList.remove("hidden");
  });
  dom.closeSourceModalBtn.addEventListener("click", () => dom.sourceModal.classList.add("hidden"));

  // Single-point binding modal.
  dom.clearPointBindingBtn.addEventListener("click", () => withStatus(clearPointBinding()));
  dom.closePointBindingModalBtn.addEventListener("click", () => {
    dom.pointBindingModal.classList.add("hidden");
  });

  // Batch binding modal.
  dom.openBatchBindingBtn.addEventListener("click", openBatchBinding);
  dom.clearSelectedPointsBtn.addEventListener("click", clearSelectedPoints);
  dom.closeBatchBindingModalBtn.addEventListener("click", () => {
    dom.batchBindingModal.classList.add("hidden");
  });
  dom.clearBatchBindingBtn.addEventListener("click", () => withStatus(clearBatchBinding()));

  // "Select all on this page": forward the change to every row checkbox so
  // per-row change handlers keep the selection state in sync.
  dom.toggleAllPoints.addEventListener("change", () => {
    const checked = dom.toggleAllPoints.checked;
    dom.pointList.querySelectorAll('input[data-point-select="true"]').forEach((input) => {
      input.checked = checked;
      input.dispatchEvent(new Event("change"));
    });
  });

  // Documentation drawers and event list.
  dom.openReadmeDocBtn.addEventListener("click", () => withStatus(openReadmeDrawer()));
  dom.openApiDocBtn.addEventListener("click", () => withStatus(openApiDocDrawer()));
  dom.closeApiDocBtn.addEventListener("click", closeApiDocDrawer);
  dom.refreshEventBtn.addEventListener("click", () => withStatus(loadEvents()));

  // Chart refresh is a no-op until a point has been selected.
  dom.refreshChartBtn.addEventListener("click", () => {
    if (!state.chartPointId) {
      return;
    }
    withStatus(openChart(state.chartPointId, state.chartPointName));
  });

  // Point-table pagination (clamped to [1, totalPages]).
  dom.prevPointsBtn.addEventListener("click", () => {
    if (state.pointsPage > 1) {
      state.pointsPage -= 1;
      withStatus(loadPoints());
    }
  });

  dom.nextPointsBtn.addEventListener("click", () => {
    const totalPages = Math.max(1, Math.ceil(state.pointsTotal / state.pointsPageSize));
    if (state.pointsPage < totalPages) {
      state.pointsPage += 1;
      withStatus(loadPoints());
    }
  });

  // Enter in the equipment search box triggers a filtered reload.
  dom.equipmentKeyword.addEventListener("keydown", (event) => {
    if (event.key === "Enter") {
      event.preventDefault();
      withStatus(loadEquipments());
    }
  });

  // View tabs.
  dom.tabOps.addEventListener("click", () => switchView("ops"));
  dom.tabConfig.addEventListener("click", () => switchView("config"));

  // Cross-module events (dispatched on `document` by equipment/unit modules).
  document.addEventListener("equipments-updated", () => {
    renderUnits();
    // Re-fetch units so embedded equipment data stays in sync with config changes.
    loadUnits().catch(() => {});
  });

  document.addEventListener("units-loaded", () => {
    renderOpsUnits();
    if (!state.selectedOpsUnitId) loadAllEquipmentCards();
  });
}
|
||||
|
||||
// Application entry point. Order matters: handlers must be bound before the
// initial view switch, and units/events must be loaded before ops rendering.
async function bootstrap() {
  bindEvents();
  switchView("ops");
  // Render the empty-state placeholders before any data arrives.
  renderSelectedNodes();
  updateSelectedPointSummary();
  updatePointFilterSummary();
  renderChart();
  // Open the live point-value WebSocket.
  startPointSocket();

  await withStatus(Promise.all([loadUnits(), loadEvents()]));
  startOps();
}

bootstrap();
|
||||
183
web/js/chart.js
183
web/js/chart.js
|
|
@ -1,183 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { state } from "./state.js";
|
||||
|
||||
/**
 * Coerce one history/stream sample into `{ timestamp, valueNumber, valueText }`.
 * Tries, in order: `value_number`, a numeric/boolean `value`, the tagged
 * variants `value.float` / `value.int` / `value.uint` / `value.bool`, and
 * finally a numeric parse of `value_text`. `valueNumber` is `null` when no
 * numeric interpretation exists.
 */
function normalizeChartItem(item) {
  const isFiniteNumber = (v) => typeof v === "number" && Number.isFinite(v);

  let valueNumber = null;
  if (isFiniteNumber(item?.value_number)) {
    valueNumber = item.value_number;
  } else if (isFiniteNumber(item?.value)) {
    valueNumber = item.value;
  } else if (typeof item?.value === "boolean") {
    valueNumber = item.value ? 1 : 0;
  } else if (isFiniteNumber(item?.value?.float)) {
    valueNumber = item.value.float;
  } else if (isFiniteNumber(item?.value?.int)) {
    valueNumber = item.value.int;
  } else if (isFiniteNumber(item?.value?.uint)) {
    valueNumber = item.value.uint;
  } else if (typeof item?.value?.bool === "boolean") {
    valueNumber = item.value.bool ? 1 : 0;
  } else if (typeof item?.value_text === "string") {
    const parsed = Number(item.value_text);
    if (Number.isFinite(parsed)) {
      valueNumber = parsed;
    }
  }

  return {
    timestamp: item?.timestamp || "",
    valueNumber,
    valueText: item?.value_text || (valueNumber === null ? "" : String(valueNumber)),
  };
}
|
||||
|
||||
/**
 * Format a numeric y-axis tick label: "--" for non-finite input, exponential
 * notation for very large/small magnitudes, two-decimal fixed otherwise.
 * @param {number} value
 * @returns {string}
 */
function formatAxisValue(value) {
  if (!Number.isFinite(value)) {
    return "--";
  }
  // Fix: 0 previously matched the |v| < 0.01 branch and rendered as
  // "0.00e+0" on the axis; show a plain fixed value instead.
  if (value === 0) {
    return "0.00";
  }
  if (Math.abs(value) >= 1000 || Math.abs(value) < 0.01) {
    return value.toExponential(2);
  }
  return value.toFixed(2);
}
|
||||
|
||||
/**
 * Extract an HH:MM:SS label from a timestamp string; fall back to the raw
 * string, or "--" when the timestamp is empty/missing.
 */
function formatTimeLabel(timestamp) {
  if (!timestamp) {
    return "--";
  }
  const text = String(timestamp);
  const match = /(\d{2}:\d{2}:\d{2})/.exec(text);
  return match ? match[1] : text;
}
|
||||
|
||||
/**
 * Load the recent history (up to 120 samples) for a point, store the numeric
 * samples in chart state, and redraw the chart.
 * @param {string|number} pointId
 * @param {string} [pointName] display name; defaults to "Point"
 */
export async function openChart(pointId, pointName) {
  state.chartPointId = pointId;
  state.chartPointName = pointName || "Point";
  dom.chartTitle.textContent = `${state.chartPointName} Chart`;

  const history = await apiFetch(`/api/point/${pointId}/history?limit=120`);
  const samples = [];
  for (const raw of history || []) {
    const entry = normalizeChartItem(raw);
    // Only numeric samples can be plotted.
    if (entry.valueNumber !== null) {
      samples.push(entry);
    }
  }
  state.chartData = samples;

  renderChart();
}
|
||||
|
||||
/**
 * Append one live sample to the active chart and redraw. Ignored when no
 * chart is open, the sample has no numeric value, or it exactly duplicates
 * the most recent sample. Keeps a rolling window of at most 120 samples.
 */
export function appendChartPoint(item) {
  if (!state.chartPointId) {
    return;
  }

  const next = normalizeChartItem(item);
  if (next.valueNumber === null) {
    return;
  }

  // Drop exact duplicates of the latest sample to avoid redundant redraws.
  const last = state.chartData[state.chartData.length - 1];
  const duplicate =
    last !== undefined &&
    last.timestamp === next.timestamp &&
    last.valueText === next.valueText &&
    last.valueNumber === next.valueNumber;
  if (duplicate) {
    return;
  }

  state.chartData.push(next);
  if (state.chartData.length > 120) {
    state.chartData = state.chartData.slice(-120);
  }
  renderChart();
}
|
||||
|
||||
// Redraw the point-history line chart onto the fixed-size canvas from
// state.chartData. Draw order matters: grid, axes, tick labels, axis titles,
// then the data polyline.
export function renderChart() {
  const ctx = dom.chartCanvas.getContext("2d");
  const width = dom.chartCanvas.width;
  const height = dom.chartCanvas.height;
  ctx.clearRect(0, 0, width, height);

  // Empty state: placeholder text instead of axes.
  if (!state.chartData.length) {
    ctx.fillStyle = "#94a3b8";
    ctx.font = "14px Segoe UI";
    ctx.fillText("Click a point row to view its chart", 24, 40);
    dom.chartSummary.textContent = "Click a point row to view its chart";
    return;
  }

  // Vertical range; widen a flat series so it doesn't divide by zero below.
  const values = state.chartData.map((item) => item.valueNumber);
  let min = Math.min(...values);
  let max = Math.max(...values);
  if (min === max) {
    min -= 1;
    max += 1;
  }

  const padding = { top: 20, right: 20, bottom: 42, left: 64 };
  const plotWidth = width - padding.left - padding.right;
  const plotHeight = height - padding.top - padding.bottom;

  // Horizontal grid lines (4 bands).
  ctx.strokeStyle = "#cbd5e1";
  ctx.lineWidth = 1;

  for (let i = 0; i <= 4; i += 1) {
    const y = padding.top + (plotHeight / 4) * i;
    ctx.beginPath();
    ctx.moveTo(padding.left, y);
    ctx.lineTo(width - padding.right, y);
    ctx.stroke();
  }

  // Left and bottom axis lines.
  ctx.beginPath();
  ctx.moveTo(padding.left, padding.top);
  ctx.lineTo(padding.left, height - padding.bottom);
  ctx.lineTo(width - padding.right, height - padding.bottom);
  ctx.strokeStyle = "#94a3b8";
  ctx.stroke();

  // Y-axis tick labels, top (max) to bottom (min).
  ctx.fillStyle = "#64748b";
  ctx.font = "12px Segoe UI";
  for (let i = 0; i <= 4; i += 1) {
    const value = max - ((max - min) / 4) * i;
    const y = padding.top + (plotHeight / 4) * i;
    ctx.fillText(formatAxisValue(value), 8, y + 4);
  }

  // X-axis time labels: first, middle and last sample.
  const firstLabel = formatTimeLabel(state.chartData[0]?.timestamp);
  const middleLabel = formatTimeLabel(
    state.chartData[Math.floor((state.chartData.length - 1) / 2)]?.timestamp,
  );
  const lastLabel = formatTimeLabel(state.chartData[state.chartData.length - 1]?.timestamp);

  ctx.fillText(firstLabel, padding.left, height - 12);
  const middleWidth = ctx.measureText(middleLabel).width;
  ctx.fillText(middleLabel, padding.left + plotWidth / 2 - middleWidth / 2, height - 12);
  const lastWidth = ctx.measureText(lastLabel).width;
  ctx.fillText(lastLabel, width - padding.right - lastWidth, height - 12);

  // Rotated "Value" label along the y-axis, "Time" under the x-axis.
  ctx.save();
  ctx.translate(16, padding.top + plotHeight / 2);
  ctx.rotate(-Math.PI / 2);
  ctx.fillStyle = "#64748b";
  ctx.fillText("Value", 0, 0);
  ctx.restore();
  ctx.fillText("Time", width / 2 - 12, height - 28);

  // The data polyline; samples are spaced evenly across the plot width.
  ctx.strokeStyle = "#2563eb";
  ctx.lineWidth = 2;
  ctx.beginPath();

  state.chartData.forEach((item, index) => {
    // Math.max(1, ...) avoids 0/0 when there is a single sample.
    const x = padding.left + (plotWidth * index) / Math.max(1, state.chartData.length - 1);
    const y = padding.top + ((max - item.valueNumber) / (max - min)) * plotHeight;
    if (index === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
  });

  ctx.stroke();

  const latest = state.chartData[state.chartData.length - 1];
  dom.chartSummary.textContent = `Latest ${state.chartData.length} points, current value ${latest.valueText || latest.valueNumber}`;
}
|
||||
137
web/js/docs.js
137
web/js/docs.js
|
|
@ -1,137 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { state } from "./state.js";
|
||||
|
||||
/**
 * Escape the HTML-significant characters in `text` so it can be embedded in
 * innerHTML without being parsed as markup.
 * Fix: the previous replacements mapped each character to itself (the entity
 * strings were lost, presumably by an earlier encoding/decoding pass), so no
 * escaping actually happened and Markdown content could inject HTML into the
 * doc drawer.
 * @param {string} text
 * @returns {string}
 */
function escapeHtml(text) {
  return text
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;");
}
|
||||
|
||||
/**
 * Build a URL-fragment id from heading text: lowercase, collapse every run
 * of characters that is neither a word character nor a CJK ideograph into a
 * single "-", and strip leading/trailing dashes.
 */
function slugify(text) {
  const collapsed = text
    .toLowerCase()
    .trim()
    .replace(/[^\w\u4e00-\u9fa5]+/g, "-");
  return collapsed.replace(/^-+|-+$/g, "");
}
|
||||
|
||||
// Minimal Markdown-subset renderer: supports ``` fenced code blocks,
// #..#### headings and plain paragraphs. Returns the rendered HTML plus the
// heading list (level/text/id) for building a table of contents.
function parseMarkdown(text) {
  const lines = text.split(/\r?\n/);
  const blocks = [];
  const headings = [];
  let inCode = false;
  let codeBuffer = [];
  let paragraph = [];

  // Emit the accumulated paragraph lines as one <p> block (joined by spaces).
  const flushParagraph = () => {
    if (!paragraph.length) {
      return;
    }
    blocks.push(`<p>${escapeHtml(paragraph.join(" "))}</p>`);
    paragraph = [];
  };

  // Emit the accumulated fenced-code lines as one <pre><code> block.
  const flushCode = () => {
    if (!codeBuffer.length) {
      return;
    }
    blocks.push(`<pre><code>${escapeHtml(codeBuffer.join("\n"))}</code></pre>`);
    codeBuffer = [];
  };

  lines.forEach((line) => {
    // A ``` fence toggles code mode, flushing whichever block just ended.
    if (line.startsWith("```")) {
      if (inCode) {
        flushCode();
      } else {
        flushParagraph();
      }
      inCode = !inCode;
      return;
    }

    // Inside a fence, lines are collected verbatim.
    if (inCode) {
      codeBuffer.push(line);
      return;
    }

    // Headings: one to four leading '#' characters followed by whitespace.
    const heading = line.match(/^(#{1,4})\s+(.*)$/);
    if (heading) {
      flushParagraph();
      const level = heading[1].length;
      const textValue = heading[2].trim();
      const id = slugify(textValue);
      headings.push({ level, text: textValue, id });
      blocks.push(`<h${level} id="${id}">${escapeHtml(textValue)}</h${level}>`);
      return;
    }

    // A blank line terminates the current paragraph.
    if (!line.trim()) {
      flushParagraph();
      return;
    }

    paragraph.push(line.trim());
  });

  // Flush any trailing paragraph or an unterminated code fence.
  flushParagraph();
  flushCode();

  return { html: blocks.join(""), headings };
}
||||
|
||||
// Fetch a markdown document, render it into the doc drawer, and build a
// clickable table of contents from its headings.
// @param {string} url          API path returning raw markdown text
// @param {string} emptyMessage fallback paragraph when the document is empty
async function loadDoc(url, emptyMessage) {
  const text = await apiFetch(url);
  const { html, headings } = parseMarkdown(text || "");

  dom.apiDocContent.innerHTML = html || `<p>${emptyMessage}</p>`;
  // TOC: one anchor per heading, indented by level via CSS class.
  dom.apiDocToc.innerHTML = headings.length
    ? headings
      .map(
        (item) =>
          `<a class="doc-toc-item level-${item.level}" href="#${item.id}">${escapeHtml(item.text)}</a>`,
      )
      .join("")
    : "<div class=\"muted\">未解析到标题</div>";

  // Intercept TOC clicks and smooth-scroll inside the drawer instead of
  // letting the browser jump the whole page to the fragment.
  dom.apiDocToc.querySelectorAll("a").forEach((link) => {
    link.addEventListener("click", (event) => {
      event.preventDefault();
      const id = link.getAttribute("href")?.slice(1);
      if (!id) {
        return;
      }
      // CSS.escape guards ids containing characters special to selectors.
      const target = dom.apiDocContent.querySelector(`#${CSS.escape(id)}`);
      if (target) {
        const offset = target.getBoundingClientRect().top - dom.apiDocContent.getBoundingClientRect().top;
        dom.apiDocContent.scrollBy({ top: offset, behavior: "smooth" });
      }
    });
  });
}
|
||||
|
||||
// Show the doc drawer with API.md, fetching the document only when it is
// not already the drawer's current content.
export async function openApiDocDrawer() {
  const heading = dom.apiDocDrawer.querySelector("h3");
  if (heading) {
    heading.textContent = "API.md";
  }
  dom.apiDocDrawer.classList.remove("hidden");
  if (state.docDrawerSource === "api") {
    return;
  }
  state.docDrawerSource = "api";
  await loadDoc("/api/docs/api-md", "API.md 为空");
}
|
||||
|
||||
// Show the doc drawer with README.md, fetching the document only when it
// is not already the drawer's current content.
export async function openReadmeDrawer() {
  const heading = dom.apiDocDrawer.querySelector("h3");
  if (heading) {
    heading.textContent = "README.md";
  }
  dom.apiDocDrawer.classList.remove("hidden");
  if (state.docDrawerSource === "readme") {
    return;
  }
  state.docDrawerSource = "readme";
  await loadDoc("/api/docs/readme-md", "README.md 为空");
}
|
||||
|
||||
// Hide the shared documentation drawer.
export function closeApiDocDrawer() {
  const { apiDocDrawer } = dom;
  apiDocDrawer.classList.add("hidden");
}
|
||||
105
web/js/dom.js
105
web/js/dom.js
|
|
@ -1,105 +0,0 @@
|
|||
// Shorthand for document.getElementById.
const byId = (id) => document.getElementById(id);

// Central registry of every DOM element the app scripts use, resolved
// once when this module is evaluated.
// NOTE(review): lookups run at import time, so the corresponding HTML
// (presumably including injected partials) must already be in the
// document — entries resolve to null otherwise; confirm load order.
export const dom = {
  // Status bar / WebSocket indicator
  statusText: byId("statusText"),
  wsDot: byId("wsDot"),
  wsLabel: byId("wsLabel"),
  batchStartAutoBtn: byId("batchStartAutoBtn"),
  batchStopAutoBtn: byId("batchStopAutoBtn"),
  // View tabs
  tabOps: byId("tabOps"),
  tabConfig: byId("tabConfig"),
  // Ops view containers
  opsUnitList: byId("opsUnitList"),
  opsEquipmentArea: byId("opsEquipmentArea"),
  logView: byId("logView"),
  // Config view lists and point table
  sourceList: byId("sourceList"),
  unitList: byId("unitList"),
  eventList: byId("eventList"),
  nodeTree: byId("nodeTree"),
  pointList: byId("pointList"),
  pointsPageInfo: byId("pointsPageInfo"),
  selectedCount: byId("selectedCount"),
  selectedPointCount: byId("selectedPointCount"),
  pointFilterSummary: byId("pointFilterSummary"),
  pointSourceSelect: byId("pointSourceSelect"),
  pointSourceNodeCount: byId("pointSourceNodeCount"),
  openPointModalBtn: byId("openPointModal"),
  // Chart panel
  chartCanvas: byId("chartCanvas"),
  chartTitle: byId("chartTitle"),
  chartSummary: byId("chartSummary"),
  // Modals and drawers
  pointModal: byId("pointModal"),
  unitModal: byId("unitModal"),
  sourceModal: byId("sourceModal"),
  equipmentModal: byId("equipmentModal"),
  pointBindingModal: byId("pointBindingModal"),
  batchBindingModal: byId("batchBindingModal"),
  apiDocDrawer: byId("apiDocDrawer"),
  // Unit form fields
  unitForm: byId("unitForm"),
  unitId: byId("unitId"),
  unitCode: byId("unitCode"),
  unitName: byId("unitName"),
  unitDescription: byId("unitDescription"),
  unitEnabled: byId("unitEnabled"),
  unitRunTimeSec: byId("unitRunTimeSec"),
  unitStopTimeSec: byId("unitStopTimeSec"),
  unitAccTimeSec: byId("unitAccTimeSec"),
  unitBlTimeSec: byId("unitBlTimeSec"),
  unitManualAck: byId("unitManualAck"),
  unitResetBtn: byId("unitReset"),
  // Source form fields
  sourceForm: byId("sourceForm"),
  sourceId: byId("sourceId"),
  sourceName: byId("sourceName"),
  sourceEndpoint: byId("sourceEndpoint"),
  sourceEnabled: byId("sourceEnabled"),
  sourceResetBtn: byId("sourceReset"),
  // Equipment form and list
  equipmentForm: byId("equipmentForm"),
  equipmentId: byId("equipmentId"),
  equipmentUnitId: byId("equipmentUnitId"),
  equipmentCode: byId("equipmentCode"),
  equipmentName: byId("equipmentName"),
  equipmentKind: byId("equipmentKind"),
  equipmentDescription: byId("equipmentDescription"),
  equipmentResetBtn: byId("equipmentReset"),
  equipmentKeyword: byId("equipmentKeyword"),
  equipmentBatchUnitId: byId("equipmentBatchUnitId"),
  selectedEquipmentSummary: byId("selectedEquipmentSummary"),
  equipmentList: byId("equipmentList"),
  // Toolbar buttons
  refreshUnitBtn: byId("refreshUnitBtn"),
  newUnitBtn: byId("newUnitBtn"),
  closeUnitModalBtn: byId("closeUnitModal"),
  closeEquipmentModalBtn: byId("closeEquipmentModal"),
  refreshEventBtn: byId("refreshEventBtn"),
  applyEquipmentUnitBtn: byId("applyEquipmentUnitBtn"),
  clearEquipmentSelectionBtn: byId("clearEquipmentSelectionBtn"),
  // Point binding dialogs
  pointBindingForm: byId("pointBindingForm"),
  bindingPointId: byId("bindingPointId"),
  bindingPointName: byId("bindingPointName"),
  bindingEquipmentId: byId("bindingEquipmentId"),
  bindingSignalRole: byId("bindingSignalRole"),
  batchBindingForm: byId("batchBindingForm"),
  batchBindingSummary: byId("batchBindingSummary"),
  batchBindingEquipmentId: byId("batchBindingEquipmentId"),
  batchBindingSignalRole: byId("batchBindingSignalRole"),
  // Documentation drawer
  apiDocToc: byId("apiDocToc"),
  apiDocContent: byId("apiDocContent"),
  openReadmeDocBtn: byId("openReadmeDoc"),
  openApiDocBtn: byId("openApiDoc"),
  closeApiDocBtn: byId("closeApiDoc"),
  // Misc action buttons
  refreshChartBtn: byId("refreshChart"),
  prevPointsBtn: byId("prevPoints"),
  nextPointsBtn: byId("nextPoints"),
  refreshEquipmentBtn: byId("refreshEquipmentBtn"),
  newEquipmentBtn: byId("newEquipmentBtn"),
  browseNodesBtn: byId("browseNodes"),
  refreshTreeBtn: byId("refreshTree"),
  createPointsBtn: byId("createPoints"),
  closeModalBtn: byId("closeModal"),
  openSourceFormBtn: byId("openSourceForm"),
  closeSourceModalBtn: byId("closeSourceModal"),
  clearPointBindingBtn: byId("clearPointBinding"),
  closePointBindingModalBtn: byId("closePointBindingModal"),
  toggleAllPoints: byId("toggleAllPoints"),
  openBatchBindingBtn: byId("openBatchBinding"),
  clearSelectedPointsBtn: byId("clearSelectedPoints"),
  closeBatchBindingModalBtn: byId("closeBatchBindingModal"),
  clearBatchBindingBtn: byId("clearBatchBinding"),
};
|
||||
|
|
@ -1,349 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { renderEquipmentKindOptions, renderRoleOptions } from "./roles.js";
|
||||
import { clearSelectedPoints, loadPoints, updatePointFilterSummary } from "./points.js";
|
||||
import { state } from "./state.js";
|
||||
|
||||
// List rows arrive either as bare equipment records or wrapped as
// { equipment, point_count, ... }; normalize to the bare record.
function equipmentOf(item) {
  if (item && item.equipment) {
    return item.equipment;
  }
  return item;
}
|
||||
|
||||
// Human-readable unit label for an equipment card, with placeholder text
// when the id is missing or does not resolve in state.unitMap.
function currentUnitLabel(unitId) {
  if (!unitId) {
    return "未绑定单元";
  }
  const unit = state.unitMap.get(unitId);
  if (!unit) {
    return "未知单元";
  }
  return `${unit.code} / ${unit.name}`;
}
|
||||
|
||||
// Equipment rows restricted to the currently selected unit; all rows when
// no unit filter is active.
function filteredEquipments() {
  const unitId = state.selectedUnitId;
  if (!unitId) {
    return state.equipments;
  }
  return state.equipments.filter((item) => equipmentOf(item).unit_id === unitId);
}
|
||||
|
||||
// Populate a unit <select> with one option per unit plus an "unbound"
// first entry, pre-selecting the given unit id.
function renderEquipmentUnitOptions(selected = "", target = dom.equipmentUnitId) {
  if (!target) {
    return;
  }
  const unitOptions = state.units.map((unit) => {
    const flag = unit.id === selected ? "selected" : "";
    return `<option value="${unit.id}" ${flag}>${unit.code} / ${unit.name}</option>`;
  });
  target.innerHTML = ['<option value="">未绑定单元</option>', ...unitOptions].join("");
}
|
||||
|
||||
// Populate the batch-assignment unit <select>, keeping a placeholder
// first entry, pre-selecting the given unit id.
function renderBatchUnitOptions(selected = "") {
  const target = dom.equipmentBatchUnitId;
  if (!target) {
    return;
  }
  const unitOptions = state.units.map((unit) => {
    const flag = unit.id === selected ? "selected" : "";
    return `<option value="${unit.id}" ${flag}>${unit.code} / ${unit.name}</option>`;
  });
  target.innerHTML = ['<option value="">批量绑定到单元...</option>', ...unitOptions].join("");
}
|
||||
|
||||
// Refresh the "N selected" label shown next to the batch controls.
function updateSelectedEquipmentSummary() {
  const label = dom.selectedEquipmentSummary;
  if (!label) {
    return;
  }
  label.textContent = `已选 ${state.selectedEquipmentIds.size} 台设备`;
}
|
||||
|
||||
// Fill an equipment <select> used for point binding, honoring the active
// unit filter; the first entry means "no binding".
export function renderBindingEquipmentOptions(selected = "", target = dom.bindingEquipmentId) {
  const rows = filteredEquipments().map((item) => {
    const equipment = equipmentOf(item);
    const flag = equipment.id === selected ? "selected" : "";
    return `<option value="${equipment.id}" ${flag}>${equipment.code} / ${equipment.name}</option>`;
  });
  target.innerHTML = ['<option value="">Unbound</option>', ...rows].join("");
}
|
||||
|
||||
// Reset both batch-binding dialog selects to their unselected defaults.
export function renderBatchBindingDefaults() {
  dom.batchBindingSignalRole.innerHTML = renderRoleOptions("");
  renderBindingEquipmentOptions("", dom.batchBindingEquipmentId);
}
|
||||
|
||||
// Clear the equipment dialog back to its "create new" state.
export function resetEquipmentForm() {
  const { equipmentForm, equipmentId, equipmentKind } = dom;
  equipmentForm.reset();
  equipmentId.value = "";
  renderEquipmentUnitOptions("");
  equipmentKind.innerHTML = renderEquipmentKindOptions("");
}
|
||||
|
||||
// Reveal the equipment dialog.
function openEquipmentModal() {
  const { equipmentModal } = dom;
  equipmentModal.classList.remove("hidden");
}
|
||||
|
||||
// Hide the equipment dialog.
export function closeEquipmentModal() {
  const { equipmentModal } = dom;
  equipmentModal.classList.add("hidden");
}
|
||||
|
||||
// Open the equipment dialog blank, defaulting the unit select to the
// currently filtered unit when one is active.
export function openCreateEquipmentModal() {
  resetEquipmentForm();
  const preselect = state.selectedUnitId;
  if (preselect && dom.equipmentUnitId) {
    dom.equipmentUnitId.value = preselect;
  }
  openEquipmentModal();
}
|
||||
|
||||
// Pre-fill the equipment dialog from an existing record and open it.
function openEditEquipmentModal(equipment) {
  const { id, unit_id: unitId, code, name, kind, description } = equipment;
  dom.equipmentId.value = id || "";
  dom.equipmentUnitId.value = unitId || "";
  dom.equipmentCode.value = code || "";
  dom.equipmentName.value = name || "";
  dom.equipmentKind.innerHTML = renderEquipmentKindOptions(kind || "");
  dom.equipmentDescription.value = description || "";
  openEquipmentModal();
}
|
||||
|
||||
// Toggle the point-table filter onto this equipment (clicking the active
// card again clears the filter) and reload points from page 1.
async function selectEquipment(equipmentId) {
  const alreadyActive = state.selectedEquipmentId === equipmentId;
  state.selectedEquipmentId = alreadyActive ? null : equipmentId;
  state.pointsPage = 1;
  clearSelectedPoints();
  renderEquipments();
  updatePointFilterSummary();
  await loadPoints();
}
|
||||
|
||||
// Track one equipment checkbox in the batch-selection set and refresh
// the summary label.
function toggleEquipmentSelection(equipmentId, checked) {
  const ids = state.selectedEquipmentIds;
  if (checked) {
    ids.add(equipmentId);
  } else {
    ids.delete(equipmentId);
  }
  updateSelectedEquipmentSummary();
}
|
||||
|
||||
// Uncheck every equipment row, then re-render the list and the
// selection summary.
export function clearSelectedEquipments() {
  state.selectedEquipmentIds.clear();
  renderEquipments();
  updateSelectedEquipmentSummary();
}
|
||||
|
||||
// Drop the single-equipment point filter and reload the point table.
// Returns the loadPoints promise so callers may await it.
export function clearEquipmentFilter() {
  state.selectedEquipmentId = null;
  state.pointsPage = 1;
  renderEquipments();
  updatePointFilterSummary();
  return loadPoints();
}
|
||||
|
||||
// Rebuild the equipment card list from state.equipments, honoring the
// active unit filter, and wire up per-card actions (select, batch
// checkbox, edit, delete, and — for feeder/distributor kinds —
// start/stop controls gated on the unit's REM state).
// NOTE(review): equipment.code/name/kind are interpolated into innerHTML
// unescaped — presumably operator-entered config data; confirm it can
// never carry user-controlled markup.
export function renderEquipments() {
  dom.equipmentList.innerHTML = "";
  updateSelectedEquipmentSummary();

  const items = filteredEquipments();
  if (!items.length) {
    dom.equipmentList.innerHTML = '<div class="list-item"><div class="muted">No equipment</div></div>';
    return;
  }

  items.forEach((item) => {
    const equipment = equipmentOf(item);
    const box = document.createElement("div");
    // Highlight the card that currently filters the point table.
    box.className = `list-item equipment-card ${state.selectedEquipmentId === equipment.id ? "selected" : ""}`;
    box.innerHTML = `
      <label class="equipment-select-row">
        <input type="checkbox" data-equipment-select="true" ${state.selectedEquipmentIds.has(equipment.id) ? "checked" : ""} />
        <span class="muted">批量选择</span>
      </label>
      <div class="row">
        <strong>${equipment.code}</strong>
        <span class="badge">${item.point_count ?? 0} pts</span>
      </div>
      <div>${equipment.name}</div>
      <div class="muted">${equipment.kind || "No type"}</div>
      <div class="muted">单元: ${currentUnitLabel(equipment.unit_id)}</div>
      <div class="row equipment-card-actions"></div>
    `;

    // Clicking the card toggles the point-table filter to this equipment.
    box.addEventListener("click", () => {
      selectEquipment(equipment.id).catch((error) => {
        dom.statusText.textContent = error.message;
      });
    });

    // Batch checkbox: stop click propagation so it doesn't also trigger
    // the card's filter toggle.
    const checkbox = box.querySelector('input[data-equipment-select="true"]');
    checkbox.addEventListener("click", (event) => {
      event.stopPropagation();
    });
    checkbox.addEventListener("change", (event) => {
      toggleEquipmentSelection(equipment.id, event.target.checked);
    });

    const actionRow = box.querySelector(".equipment-card-actions");

    const editBtn = document.createElement("button");
    editBtn.className = "secondary";
    editBtn.textContent = "Edit";
    editBtn.addEventListener("click", (event) => {
      event.stopPropagation();
      openEditEquipmentModal(equipment);
    });

    const deleteBtn = document.createElement("button");
    deleteBtn.className = "danger";
    deleteBtn.textContent = "Delete";
    deleteBtn.addEventListener("click", (event) => {
      event.stopPropagation();
      deleteEquipment(equipment.id).catch((error) => {
        dom.statusText.textContent = error.message;
      });
    });

    actionRow.append(editBtn, deleteBtn);

    // Controllable kinds additionally get Start/Stop buttons, disabled
    // while the owning unit reports local (REM off) mode.
    if (equipment.kind === "coal_feeder" || equipment.kind === "distributor") {
      const unitRuntime = equipment.unit_id ? state.runtimes.get(equipment.unit_id) : null;
      const remLocal = unitRuntime?.rem_local ?? false;

      const startBtn = document.createElement("button");
      startBtn.className = "secondary";
      startBtn.textContent = "Start";
      startBtn.disabled = remLocal;
      startBtn.title = remLocal ? "设备处于本地模式(REM关)" : "";
      startBtn.addEventListener("click", (e) => {
        e.stopPropagation();
        // Fire-and-forget; server pushes resulting state over WebSocket.
        apiFetch(`/api/control/equipment/${equipment.id}/start`, { method: "POST" })
          .catch(() => {});
      });

      const stopBtn = document.createElement("button");
      stopBtn.className = "danger";
      stopBtn.textContent = "Stop";
      stopBtn.disabled = remLocal;
      stopBtn.title = remLocal ? "设备处于本地模式(REM关)" : "";
      stopBtn.addEventListener("click", (e) => {
        e.stopPropagation();
        apiFetch(`/api/control/equipment/${equipment.id}/stop`, { method: "POST" })
          .catch(() => {});
      });

      actionRow.append(startBtn, stopBtn);
    }

    dom.equipmentList.appendChild(box);
  });
}
|
||||
|
||||
// Fetch the full equipment list (optionally keyword-filtered), rebuild
// the derived caches (equipmentMap, selection set), re-render every
// dependent select/list, and announce the refresh via the
// "equipments-updated" DOM event.
export async function loadEquipments() {
  const keyword = dom.equipmentKeyword.value.trim();
  // page_size=-1 requests all rows in one page.
  const query = keyword
    ? `?page=1&page_size=-1&keyword=${encodeURIComponent(keyword)}`
    : "?page=1&page_size=-1";
  const data = await apiFetch(`/api/equipment${query}`);
  state.equipments = data.data || [];
  state.equipmentMap = new Map(
    state.equipments.map((item) => {
      const equipment = equipmentOf(item);
      return [equipment.id, equipment];
    }),
  );

  // Prune batch selections that refer to equipment no longer present.
  state.selectedEquipmentIds.forEach((id) => {
    if (!state.equipmentMap.has(id)) {
      state.selectedEquipmentIds.delete(id);
    }
  });

  // Rebuild all selects while preserving the user's current choices.
  renderEquipmentUnitOptions(dom.equipmentUnitId?.value || "");
  renderBatchUnitOptions(dom.equipmentBatchUnitId?.value || "");
  dom.equipmentKind.innerHTML = renderEquipmentKindOptions(dom.equipmentKind?.value || "");
  renderBindingEquipmentOptions();
  renderBatchBindingDefaults();
  // Drop the point filter if its equipment vanished.
  if (state.selectedEquipmentId && !state.equipmentMap.has(state.selectedEquipmentId)) {
    state.selectedEquipmentId = null;
  }
  renderEquipments();
  updatePointFilterSummary();
  document.dispatchEvent(new Event("equipments-updated"));
}
|
||||
|
||||
// Form submit handler: create or update an equipment record (POST when
// the hidden id field is empty, PUT otherwise), then refresh the
// equipment and point views. A newly created record becomes the active
// point filter.
export async function saveEquipment(event) {
  event.preventDefault();

  const unitId = dom.equipmentUnitId.value || null;
  const payload = {
    unit_id: unitId,
    code: dom.equipmentCode.value.trim(),
    name: dom.equipmentName.value.trim(),
    // Empty optional fields are sent as null rather than "".
    kind: dom.equipmentKind.value.trim() || null,
    description: dom.equipmentDescription.value.trim() || null,
  };

  const id = dom.equipmentId.value;
  const result = await apiFetch(id ? `/api/equipment/${id}` : "/api/equipment", {
    method: id ? "PUT" : "POST",
    body: JSON.stringify(payload),
  });

  closeEquipmentModal();
  await loadEquipments();
  // Select the freshly created equipment so its points show immediately.
  if (!id && result?.id) {
    state.selectedEquipmentId = result.id;
  }
  renderEquipments();
  updatePointFilterSummary();
  await loadPoints();
}
|
||||
|
||||
// Assign (or clear, when the select is empty) the unit for every checked
// equipment row, then reset the batch UI and reload the list.
// @throws {Error} when no equipment rows are selected.
export async function applyBatchEquipmentUnit() {
  if (state.selectedEquipmentIds.size === 0) {
    throw new Error("请先选择设备");
  }

  const chosenUnit = dom.equipmentBatchUnitId.value;
  const body = JSON.stringify({
    equipment_ids: [...state.selectedEquipmentIds],
    unit_id: chosenUnit || null,
  });
  await apiFetch("/api/equipment/batch/set-unit", { method: "PUT", body });

  clearSelectedEquipments();
  renderBatchUnitOptions("");
  await loadEquipments();
}
|
||||
|
||||
// Confirm and delete one equipment record, clearing any selection or
// filter state that referenced it, then reload equipment and points.
export async function deleteEquipment(equipmentId) {
  const confirmed = window.confirm("Delete this equipment?");
  if (!confirmed) {
    return;
  }

  await apiFetch(`/api/equipment/${equipmentId}`, { method: "DELETE" });
  if (state.selectedEquipmentId === equipmentId) {
    state.selectedEquipmentId = null;
  }
  state.selectedEquipmentIds.delete(equipmentId);
  resetEquipmentForm();
  closeEquipmentModal();
  clearSelectedPoints();
  await loadEquipments();
  await loadPoints();
}
|
||||
|
||||
// Unbind a point from its equipment/role (defaults to the point shown in
// the binding dialog), close the dialog, and refresh both lists.
export async function clearPointBinding(pointId = dom.bindingPointId.value) {
  const payload = { equipment_id: null, signal_role: null };
  await apiFetch(`/api/point/${pointId}`, {
    method: "PUT",
    body: JSON.stringify(payload),
  });
  dom.pointBindingModal.classList.add("hidden");
  await loadEquipments();
  await loadPoints();
}
|
||||
|
|
@ -1,86 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { state } from "./state.js";
|
||||
|
||||
// Number of events requested per page.
const PAGE_SIZE = 10;

// Infinite-scroll cursor state shared by loadEvents/loadMore:
// _page is the next page index to request, _hasMore records whether the
// last response was a full page, and _loading guards against overlapping
// fetches.
let _page = 1;
let _hasMore = false;
let _loading = false;
|
||||
|
||||
// Render a timestamp string, substituting a placeholder when absent.
function formatTime(value) {
  if (value) {
    return value;
  }
  return "--";
}
|
||||
|
||||
// Build one event-list row from an event record.
// NOTE(review): event_type and message are inserted via innerHTML
// unescaped — presumably server-generated; confirm they can never carry
// user-controlled markup.
function makeCard(item) {
  const level = (item.level || "info").toLowerCase();
  const card = document.createElement("div");
  card.className = "event-card";
  const parts = [
    `<span class="badge event-badge level-${level}">${level.toUpperCase()}</span>`,
    `<span class="muted event-time">${formatTime(item.created_at)}</span>`,
    `<span class="event-type">${item.event_type}</span>`,
    `<span class="event-message">${item.message}</span>`,
  ];
  card.innerHTML = parts.join("");
  return card;
}
|
||||
|
||||
// Fetch and append the next page of events (infinite scroll). The
// _loading flag prevents overlapping requests; _hasMore stops fetching
// once a short page signals the end of the data.
async function loadMore() {
  if (_loading || !_hasMore) return;
  _loading = true;

  const params = new URLSearchParams({ page: String(_page), page_size: String(PAGE_SIZE) });
  if (state.selectedUnitId) params.set("unit_id", state.selectedUnitId);

  try {
    const response = await apiFetch(`/api/event?${params.toString()}`);
    const items = response.data || [];
    items.forEach((item) => dom.eventList.appendChild(makeCard(item)));
    // A full page implies more rows may exist server-side.
    _hasMore = items.length === PAGE_SIZE;
    _page += 1;
  } finally {
    _loading = false;
  }
}
|
||||
|
||||
// Reset the event list and load its first page, honoring the active
// unit filter. Subsequent pages are pulled by loadMore via the scroll
// listener.
export async function loadEvents() {
  // Reset the infinite-scroll cursor before refetching.
  _page = 1;
  _hasMore = false;
  _loading = false;
  dom.eventList.innerHTML = "";

  const params = new URLSearchParams({ page: "1", page_size: String(PAGE_SIZE) });
  if (state.selectedUnitId) params.set("unit_id", state.selectedUnitId);

  _loading = true;
  try {
    const response = await apiFetch(`/api/event?${params.toString()}`);
    const items = response.data || [];

    if (!items.length) {
      dom.eventList.innerHTML = '<div class="list-item"><div class="muted">暂无事件</div></div>';
      return;
    }

    items.forEach((item) => dom.eventList.appendChild(makeCard(item)));
    // A full first page implies more rows; continue from page 2.
    _hasMore = items.length === PAGE_SIZE;
    _page = 2;
  } finally {
    _loading = false;
  }
}
|
||||
|
||||
// Insert a freshly pushed event at the top of the list, honoring the
// active unit filter and capping the rendered cards at 100.
export function prependEvent(item) {
  const filterUnit = state.selectedUnitId;
  if (filterUnit && item.unit_id !== filterUnit) {
    return;
  }

  const list = dom.eventList;
  // Drop the "no events" placeholder row if it is present.
  const placeholder = list.querySelector(".list-item");
  if (placeholder) {
    placeholder.remove();
  }

  list.insertBefore(makeCard(item), list.firstChild);

  // Keep the DOM bounded so a long session cannot grow without limit.
  const cards = list.querySelectorAll(".event-card");
  if (cards.length > 100) {
    cards[cards.length - 1].remove();
  }
}
|
||||
|
||||
// Infinite scroll: request the next event page when the list is scrolled
// to within 40px of its bottom. loadMore itself guards re-entrancy.
dom.eventList.addEventListener("scroll", () => {
  const el = dom.eventList;
  if (el.scrollTop + el.clientHeight >= el.scrollHeight - 40) {
    loadMore();
  }
});
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
// Replace a placeholder element with the HTML fragment named by its
// data-partial attribute.
// @throws {Error} when the fragment cannot be fetched.
async function loadPartial(slot) {
  const url = slot.dataset.partial;
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Failed to load partial: ${url}`);
  }
  const fragment = await response.text();
  slot.insertAdjacentHTML("beforebegin", fragment);
  slot.remove();
}
|
||||
|
||||
// Inject every HTML partial in parallel, then boot the application
// module once the DOM is complete.
async function bootstrapPage() {
  const slots = [...document.querySelectorAll("[data-partial]")];
  await Promise.all(slots.map((slot) => loadPartial(slot)));
  await import("./app.js");
}
|
||||
|
||||
// Surface a fatal bootstrap failure directly in the page body so the
// user is not left with a blank screen.
bootstrapPage().catch((error) => {
  document.body.innerHTML = `<pre>${error.message || String(error)}</pre>`;
});
|
||||
176
web/js/logs.js
176
web/js/logs.js
|
|
@ -1,176 +0,0 @@
|
|||
import { appendChartPoint } from "./chart.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { prependEvent } from "./events.js";
|
||||
import { formatValue } from "./points.js";
|
||||
import { state } from "./state.js";
|
||||
import { loadUnits, renderUnits } from "./units.js";
|
||||
import { loadEquipments } from "./equipment.js";
|
||||
import { showToast } from "./api.js";
|
||||
|
||||
// Escape HTML-sensitive characters before inserting log text via innerHTML.
// NOTE(review): the previous body replaced each character with itself
// (entities were lost in an HTML-decoding round trip), so log content was
// inserted unescaped. Restore the entities; "&" must be replaced first.
function escapeHtml(text) {
  return text.replaceAll("&", "&amp;").replaceAll("<", "&lt;").replaceAll(">", "&gt;");
}
|
||||
|
||||
// Best-effort parse of one log line as a JSON object; returns null for
// plain-text lines or malformed JSON.
function parseLogLine(line) {
  const candidate = line.trim();
  const looksJson = candidate.startsWith("{") && candidate.endsWith("}");
  if (!looksJson) {
    return null;
  }
  try {
    return JSON.parse(candidate);
  } catch {
    return null;
  }
}
|
||||
|
||||
// Append one log line to the log view. JSON-structured lines are
// rendered with level/timestamp/target spans; anything else is shown
// verbatim. Scrolling sticks to the bottom only if the user was already
// there (within 10px).
function appendLog(line) {
  if (!dom.logView) return;
  const atBottom = dom.logView.scrollTop + dom.logView.clientHeight >= dom.logView.scrollHeight - 10;
  const div = document.createElement("div");
  const parsed = parseLogLine(line);
  if (!parsed) {
    // Plain text: textContent avoids interpreting any markup in the line.
    div.className = "log-line";
    div.textContent = line;
  } else {
    const levelRaw = (parsed.level || "").toString();
    const level = levelRaw.toLowerCase();
    div.className = `log-line${level ? ` level-${level}` : ""}`;
    // Optional fields render as empty strings when absent.
    div.innerHTML = [
      `<span class="level">${escapeHtml(levelRaw || "LOG")}</span>`,
      parsed.timestamp ? `<span class="muted"> ${escapeHtml(parsed.timestamp)}</span>` : "",
      parsed.target ? `<span class="muted"> ${escapeHtml(parsed.target)}</span>` : "",
      `<span class="message">${escapeHtml(parsed.fields?.message || parsed.message || parsed.msg || line)}</span>`,
    ].join("");
  }
  dom.logView.appendChild(div);
  if (atBottom) dom.logView.scrollTop = dom.logView.scrollHeight;
}
|
||||
|
||||
// Append a muted marker line (e.g. a file-switch notice) to the log view,
// preserving stick-to-bottom scrolling.
function appendLogDivider(text) {
  const view = dom.logView;
  if (!view) {
    return;
  }
  const pinned = view.scrollTop + view.clientHeight >= view.scrollHeight - 10;
  const row = document.createElement("div");
  row.className = "log-line muted";
  row.textContent = text;
  view.appendChild(row);
  if (pinned) {
    view.scrollTop = view.scrollHeight;
  }
}
|
||||
|
||||
// Open the SSE log stream once; each "log" event carries a batch of
// lines plus an optional marker that the server rotated to a new file.
export function startLogs() {
  if (state.logSource) {
    return;
  }
  let activeFile = null;
  const source = new EventSource("/api/logs/stream");
  state.logSource = source;
  source.addEventListener("log", (event) => {
    const payload = JSON.parse(event.data);
    if (payload.reset && payload.file && payload.file !== activeFile) {
      appendLogDivider(`[log switched to ${payload.file}]`);
    }
    activeFile = payload.file || activeFile;
    (payload.lines || []).forEach(appendLog);
  });
  source.addEventListener("error", () => appendLog("[log stream error]"));
}
|
||||
|
||||
// Close the SSE log stream if one is open.
export function stopLogs() {
  const source = state.logSource;
  if (!source) {
    return;
  }
  source.close();
  state.logSource = null;
}
|
||||
|
||||
// Handle of the persistent "disconnected" toast; non-null while the
// backend connection is down so duplicate toasts are not stacked.
let _disconnectToast = null;
|
||||
// Reflect WebSocket connectivity in the header dot/label and manage the
// sticky disconnect toast: shown once (duration 0 = persistent) on loss,
// dismissed and replaced by a success toast on recovery.
function setWsStatus(connected) {
  if (dom.wsDot) {
    dom.wsDot.className = `ws-dot ${connected ? "connected" : "disconnected"}`;
  }
  if (dom.wsLabel) {
    dom.wsLabel.textContent = connected ? "已连接" : "连接断开,重连中…";
  }
  if (!connected && !_disconnectToast) {
    _disconnectToast = showToast("后端连接断开", {
      message: "正在重连,请稍候…",
      level: "error",
      duration: 0,
      shake: true,
    });
  } else if (connected && _disconnectToast) {
    _disconnectToast.dismiss();
    _disconnectToast = null;
    showToast("连接已恢复", { level: "success", duration: 3000 });
  }
}
|
||||
|
||||
// Exponential-backoff delay for WebSocket reconnects (1s, doubling,
// capped at 30s in the onclose handler).
let _reconnectDelay = 1000;
// True once the socket has connected at least once; used so that a later
// onopen is treated as a reconnect that should refresh cached data.
let _connectedOnce = false;
|
||||
// Open the public WebSocket and keep the UI in sync with pushed
// updates (point values, events, unit runtime changes). Reconnects
// forever with exponential backoff.
export function startPointSocket() {
  const protocol = location.protocol === "https:" ? "wss" : "ws";
  const ws = new WebSocket(`${protocol}://${location.host}/ws/public`);
  state.pointSocket = ws;

  ws.onopen = () => {
    setWsStatus(true);
    _reconnectDelay = 1000;
    // On a reconnect (not the first connect), refresh data that may have
    // changed while the socket was down.
    if (_connectedOnce) {
      loadUnits().catch(() => {});
      if (state.activeView === "config") loadEquipments().catch(() => {});
    }
    _connectedOnce = true;
  };

  ws.onmessage = (event) => {
    try {
      const payload = JSON.parse(event.data);
      if (payload.type === "PointNewValue" || payload.type === "point_new_value") {
        const data = payload.data;

        // config view point table
        const entry = state.pointEls.get(data.point_id);
        if (entry) {
          entry.value.textContent = formatValue(data);
          entry.quality.className = `badge quality-${(data.quality || "unknown").toLowerCase()}`;
          entry.quality.textContent = (data.quality || "unknown").toUpperCase();
          entry.time.textContent = data.timestamp || "--";
        }

        // ops view signal pill
        const opsEntry = state.opsPointEls.get(data.point_id);
        if (opsEntry) {
          const { pillEl, syncBtns } = opsEntry;
          state.opsSignalCache.set(data.point_id, { quality: data.quality, value_text: data.value_text });
          const role = pillEl.dataset.opsRole;
          // Lazy import avoids a circular dependency with ops.js.
          import("./ops.js").then(({ sigPillClass }) => {
            pillEl.className = sigPillClass(role, data.quality, data.value_text);
            syncBtns?.();
          });
        }

        // Live chart follows only the currently charted point.
        if (state.chartPointId === data.point_id) {
          appendChartPoint(data);
        }
        return;
      }

      if (payload.type === "EventCreated" || payload.type === "event_created") {
        prependEvent(payload.data);
      }

      if (payload.type === "UnitRuntimeChanged") {
        const runtime = payload.data;
        state.runtimes.set(runtime.unit_id, runtime);
        renderUnits();
        // lazy import to avoid circular dep (ops.js -> logs.js -> ops.js)
        import("./ops.js").then(({ renderOpsUnits, syncEquipmentButtonsForUnit }) => {
          renderOpsUnits();
          syncEquipmentButtonsForUnit(runtime.unit_id);
        });
        return;
      }
    } catch {
      // ignore malformed messages
    }
  };

  ws.onclose = () => {
    setWsStatus(false);
    // Schedule a reconnect with a doubling delay capped at 30s.
    window.setTimeout(startPointSocket, _reconnectDelay);
    _reconnectDelay = Math.min(_reconnectDelay * 2, 30000);
  };

  ws.onerror = () => setWsStatus(false);
}
|
||||
232
web/js/ops.js
232
web/js/ops.js
|
|
@ -1,232 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { dom } from "./dom.js";
|
||||
import { state } from "./state.js";
|
||||
import { loadUnits } from "./units.js";
|
||||
|
||||
// Signal roles recognized by the ops view and their pill display labels.
const SIGNAL_ROLES = ["rem", "run", "flt"];
const ROLE_LABELS = { rem: "REM", run: "RUN", flt: "FLT" };
|
||||
// A signal counts as ON only when its quality is "good" and its value
// reads as a truthy marker ("1" / "true" / "on", case-insensitive).
function isSignalOn(quality, valueText) {
  const goodQuality = Boolean(quality) && quality.toLowerCase() === "good";
  if (!goodQuality) {
    return false;
  }
  const normalized = String(valueText ?? "").trim().toLowerCase();
  return ["1", "true", "on"].includes(normalized);
}
|
||||
|
||||
// CSS classes for a signal pill: warning style on bad quality, plain
// when the signal is off, fault-colored for an active FLT signal, and
// the "on" style otherwise.
export function sigPillClass(role, quality, valueText) {
  const goodQuality = Boolean(quality) && quality.toLowerCase() === "good";
  if (!goodQuality) {
    return "sig-pill sig-warn";
  }
  if (!isSignalOn(quality, valueText)) {
    return "sig-pill";
  }
  if (role === "flt") {
    return "sig-pill sig-fault";
  }
  return "sig-pill sig-on";
}
|
||||
|
||||
// Status badge markup for a unit runtime. Comm and fault locks override
// the plain state display; a missing runtime renders as OFFLINE, and an
// unrecognized state string is shown verbatim with no extra class.
function runtimeBadge(runtime) {
  if (!runtime) {
    return '<span class="badge offline">OFFLINE</span>';
  }
  if (runtime.comm_locked) {
    return '<span class="badge offline">COMM ERR</span>';
  }
  if (runtime.fault_locked) {
    return '<span class="badge danger">FAULT</span>';
  }
  const labelByState = { stopped: "STOPPED", running: "RUNNING", distributor_running: "DIST RUN", fault_locked: "FAULT", comm_locked: "COMM ERR" };
  const classByState = { stopped: "", running: "online", distributor_running: "online", fault_locked: "danger", comm_locked: "offline" };
  const badgeClass = classByState[runtime.state] ?? "";
  const badgeLabel = labelByState[runtime.state] ?? runtime.state;
  return `<span class="badge ${badgeClass}">${badgeLabel}</span>`;
}
|
||||
|
||||
/**
 * Render the left-hand list of control units on the ops page, including a
 * runtime status badge, enable flag, accumulator readout, and per-unit
 * auto-control / fault-acknowledge buttons. Re-invoked whenever unit data,
 * runtimes, or the selection changes.
 */
export function renderOpsUnits() {
  if (!dom.opsUnitList) return;
  dom.opsUnitList.innerHTML = "";

  if (!state.units.length) {
    dom.opsUnitList.innerHTML = '<div class="muted" style="padding:12px">暂无控制单元</div>';
    return;
  }

  state.units.forEach((unit) => {
    const runtime = state.runtimes.get(unit.id);
    const item = document.createElement("div");
    item.className = `ops-unit-item${state.selectedOpsUnitId === unit.id ? " selected" : ""}`;
    // NOTE(review): display_acc_sec is divided by 1000 before being shown as
    // seconds — the field appears to hold milliseconds despite its "_sec"
    // suffix; confirm the unit with the backend.
    item.innerHTML = `
      <div class="ops-unit-item-name">${unit.code} / ${unit.name}</div>
      <div class="ops-unit-item-meta">
        ${runtimeBadge(runtime)}
        <span class="badge ${unit.enabled ? "" : "offline"}">${unit.enabled ? "EN" : "DIS"}</span>
        ${runtime ? `<span class="muted">Acc ${Math.floor(runtime.display_acc_sec / 1000)}s</span>` : ""}
      </div>
      <div class="ops-unit-item-actions"></div>
    `;
    item.addEventListener("click", () => selectOpsUnit(unit.id));

    const actions = item.querySelector(".ops-unit-item-actions");

    // Starting auto control is blocked while faulted, awaiting manual ack, or
    // in local (REM off) mode; stopping auto is always allowed once running.
    const isAutoOn = runtime?.auto_enabled;
    const startBlocked = !isAutoOn && (runtime?.fault_locked || runtime?.manual_ack_required || runtime?.rem_local);
    const autoBtn = document.createElement("button");
    autoBtn.className = isAutoOn ? "danger" : "secondary";
    autoBtn.textContent = isAutoOn ? "Stop Auto" : "Start Auto";
    autoBtn.disabled = startBlocked;
    // Tooltip explains which specific condition blocks the start.
    autoBtn.title = startBlocked
      ? (runtime?.fault_locked ? "设备故障中,无法启动自动控制"
        : runtime?.rem_local ? "设备处于本地模式(REM关),无法启动自动控制"
        : "需人工确认故障后才可启动自动控制")
      : (isAutoOn ? "停止自动控制" : "启动自动控制");
    autoBtn.addEventListener("click", (e) => {
      e.stopPropagation(); // don't toggle the unit selection on button click
      apiFetch(`/api/control/unit/${unit.id}/${isAutoOn ? "stop-auto" : "start-auto"}`, { method: "POST" })
        .then(() => loadUnits()).catch(() => {});
    });
    actions.append(autoBtn);

    // Fault-acknowledge button is shown only while a manual ack is pending.
    if (runtime?.manual_ack_required) {
      const ackBtn = document.createElement("button");
      ackBtn.className = "danger";
      ackBtn.textContent = "Ack Fault";
      ackBtn.title = "人工确认解除故障锁定";
      ackBtn.addEventListener("click", (e) => {
        e.stopPropagation();
        apiFetch(`/api/control/unit/${unit.id}/ack-fault`, { method: "POST" })
          .then(() => loadUnits()).catch(() => {});
      });
      actions.append(ackBtn);
    }

    dom.opsUnitList.appendChild(item);
  });
}
|
||||
|
||||
/**
 * Toggle the selected unit in the ops sidebar and refresh the equipment area.
 * Clicking the already-selected unit clears the selection and shows the
 * equipment of every unit again.
 * @param {*} unitId - id of the clicked unit
 */
function selectOpsUnit(unitId) {
  const deselecting = unitId === state.selectedOpsUnitId;
  state.selectedOpsUnitId = deselecting ? null : unitId;
  renderOpsUnits();
  state.opsPointEls.clear();

  if (!state.selectedOpsUnitId) {
    const allEquipments = state.units.flatMap((u) => u.equipments || []);
    renderOpsEquipments(allEquipments);
    return;
  }

  const unit = state.unitMap.get(unitId);
  renderOpsEquipments(unit?.equipments || []);
}
|
||||
|
||||
/**
 * Render equipment cards for every unit (no unit filter), resetting the
 * pill-element registry used by WS updates first.
 */
export function loadAllEquipmentCards() {
  if (!dom.opsEquipmentArea) return;
  state.opsPointEls.clear();
  const allEquipments = state.units.flatMap((unit) => unit.equipments || []);
  renderOpsEquipments(allEquipments);
}
|
||||
|
||||
/**
 * Render one card per equipment: signal pills for the bound rem/run/flt
 * points, plus Start/Stop buttons for controllable kinds. Registers pill
 * elements and button-sync closures in state so the WS handler can update
 * them in place without re-rendering.
 * @param {Array<object>} equipments - equipment records to render
 */
function renderOpsEquipments(equipments) {
  dom.opsEquipmentArea.innerHTML = "";
  state.opsUnitSyncFns.clear();

  if (!equipments.length) {
    dom.opsEquipmentArea.innerHTML = '<div class="muted ops-placeholder">该单元下暂无设备</div>';
    return;
  }

  equipments.forEach((eq) => {
    const card = document.createElement("div");
    card.className = "ops-eq-card";

    // Index bound points by their signal role for quick lookup below.
    const roleMap = {};
    (eq.role_points || []).forEach((p) => { roleMap[p.signal_role] = p; });

    // Signal pills — one pill per bound role, text label inside.
    // Pills start in the warn style until real data arrives.
    const signalRowsHtml = SIGNAL_ROLES.map((role) => {
      const point = roleMap[role];
      if (!point) return "";
      return `<span class="sig-pill sig-warn" data-ops-dot="${point.point_id}" data-ops-role="${role}">${ROLE_LABELS[role] || role}</span>`;
    }).join("");

    // Only these equipment kinds expose manual Start/Stop controls.
    const canControl = eq.kind === "coal_feeder" || eq.kind === "distributor";
    const unitId = eq.unit_id ?? null;

    card.innerHTML = `
      <div class="ops-eq-card-head">
        <strong title="${eq.name}">${eq.code}</strong>
        <span class="badge">${eq.kind || "--"}</span>
      </div>
      <div class="ops-signal-rows">${signalRowsHtml || '<span class="muted" style="font-size:11px;padding:2px 0">无绑定信号</span>'}</div>
      ${canControl ? `<div class="ops-eq-card-actions" data-unit-id="${unitId || ""}"></div>` : ""}
    `;

    // Closure that re-evaluates Start/Stop enablement; shared with the
    // pill registry and the per-unit sync registry below.
    let syncBtns = null;

    if (canControl) {
      const actions = card.querySelector(".ops-eq-card-actions");
      const remPointId = roleMap["rem"]?.point_id ?? null;
      const fltPointId = roleMap["flt"]?.point_id ?? null;

      const startBtn = document.createElement("button");
      startBtn.className = "secondary";
      startBtn.textContent = "Start";
      startBtn.addEventListener("click", () =>
        apiFetch(`/api/control/equipment/${eq.id}/start`, { method: "POST" }).catch(() => {})
      );
      const stopBtn = document.createElement("button");
      stopBtn.className = "danger";
      stopBtn.textContent = "Stop";
      stopBtn.addEventListener("click", () =>
        apiFetch(`/api/control/equipment/${eq.id}/stop`, { method: "POST" }).catch(() => {})
      );
      actions.append(startBtn, stopBtn);

      // Manual control is disabled while unit auto-control runs, while the
      // equipment is not in remote mode, or while a fault signal is active.
      // A missing rem binding counts as "remote OK".
      syncBtns = function () {
        const autoOn = !!(unitId && state.runtimes.get(unitId)?.auto_enabled);
        const remSig = remPointId ? state.opsSignalCache.get(remPointId) : null;
        const fltSig = fltPointId ? state.opsSignalCache.get(fltPointId) : null;
        const remOk = !remPointId || isSignalOn(remSig?.quality, remSig?.value_text);
        const fltActive = !!(fltPointId && isSignalOn(fltSig?.quality, fltSig?.value_text));
        const disabled = autoOn || !remOk || fltActive;
        const title = autoOn ? "自动控制运行中,请先停止自动"
          : !remOk ? "设备未切换至远程模式"
          : fltActive ? "设备故障中"
          : "";
        startBtn.disabled = disabled;
        stopBtn.disabled = disabled;
        startBtn.title = title;
        stopBtn.title = title;
      };
    }

    dom.opsEquipmentArea.appendChild(card);

    // Register pills for WS updates; seed signal cache from initial point_monitor data
    SIGNAL_ROLES.forEach((role) => {
      const point = roleMap[role];
      if (!point) return;
      const pillEl = card.querySelector(`[data-ops-dot="${point.point_id}"]`);
      if (!pillEl) return;
      if (point.point_monitor) {
        const m = point.point_monitor;
        state.opsSignalCache.set(point.point_id, { quality: m.quality, value_text: m.value_text });
        pillEl.className = sigPillClass(role, m.quality, m.value_text);
      }
      // Only rem/flt changes affect button enablement, so only those roles
      // carry the syncBtns callback.
      const isSyncRole = canControl && (role === "rem" || role === "flt");
      state.opsPointEls.set(point.point_id, { pillEl, syncBtns: isSyncRole ? syncBtns : null });
    });

    if (canControl) {
      syncBtns(); // apply initial enablement from seeded cache/runtime
      if (unitId) {
        // Per-unit registry lets runtime WS events re-sync all of the
        // unit's equipment buttons at once.
        if (!state.opsUnitSyncFns.has(unitId)) state.opsUnitSyncFns.set(unitId, new Set());
        state.opsUnitSyncFns.get(unitId).add(syncBtns);
      }
    }
  });
}
|
||||
|
||||
/**
 * Initialize the ops page: draw the unit list and wire the batch
 * start/stop-auto buttons (which refresh the unit list on success and
 * ignore request failures).
 */
export function startOps() {
  renderOpsUnits();

  // Both batch buttons share identical wiring apart from the endpoint.
  const wireBatchButton = (button, endpoint) => {
    button?.addEventListener("click", () => {
      apiFetch(endpoint, { method: "POST" })
        .then(() => loadUnits())
        .catch(() => {});
    });
  };

  wireBatchButton(dom.batchStartAutoBtn, "/api/control/unit/batch-start-auto");
  wireBatchButton(dom.batchStopAutoBtn, "/api/control/unit/batch-stop-auto");
}
|
||||
|
||||
/**
 * Called by the WS handler when a unit's runtime changes — re-evaluates the
 * Start/Stop button state of every equipment card registered for that unit.
 * @param {*} unitId - unit whose equipment buttons should be re-synced
 */
export function syncEquipmentButtonsForUnit(unitId) {
  const syncFns = state.opsUnitSyncFns.get(unitId);
  if (!syncFns) return;
  for (const fn of syncFns) fn();
}
|
||||
363
web/js/points.js
363
web/js/points.js
|
|
@ -1,363 +0,0 @@
|
|||
import { apiFetch } from "./api.js";
|
||||
import { openChart } from "./chart.js";
|
||||
import { dom } from "./dom.js";
|
||||
import {
|
||||
loadEquipments,
|
||||
renderBatchBindingDefaults,
|
||||
renderBindingEquipmentOptions,
|
||||
} from "./equipment.js";
|
||||
import { renderRoleOptions } from "./roles.js";
|
||||
import { state } from "./state.js";
|
||||
|
||||
/**
 * Refresh the "Nodes: N" counter under the node tree.
 * Counts every <details> element in the tree, i.e. nodes at all depths.
 */
function updatePointSourceNodeCount() {
  const nodeCount = dom.nodeTree.querySelectorAll("details").length;
  dom.pointSourceNodeCount.textContent = `Nodes: ${nodeCount}`;
}
|
||||
|
||||
/**
 * Human-readable display string for a point monitor record.
 * Preference order: non-empty value_text, then the raw value (strings as-is,
 * anything else JSON-encoded), with "--" for missing/null data.
 * @param {object|null|undefined} monitor
 * @returns {string}
 */
export function formatValue(monitor) {
  if (!monitor) return "--";
  if (monitor.value_text) return monitor.value_text;
  const { value } = monitor;
  if (value === null || value === undefined) return "--";
  return typeof value === "string" ? value : JSON.stringify(value);
}
|
||||
|
||||
/** Refresh the "Selected N nodes" label from the current node selection. */
export function renderSelectedNodes() {
  const selected = state.selectedNodeIds.size;
  dom.selectedCount.textContent = `Selected ${selected} nodes`;
}
|
||||
|
||||
/**
 * Refresh both "Selected N points" labels and enable the batch-binding
 * button only when at least one point is selected.
 */
export function updateSelectedPointSummary() {
  const selected = state.selectedPointIds.size;
  const summaryText = `Selected ${selected} points`;
  dom.selectedPointCount.textContent = summaryText;
  dom.batchBindingSummary.textContent = summaryText;
  dom.openBatchBindingBtn.disabled = selected === 0;
}
|
||||
|
||||
/**
 * Describe the active point filters (equipment and/or source) in the
 * filter-summary label; falls back to "All points" when nothing is filtered.
 */
export function updatePointFilterSummary() {
  const parts = [];

  if (state.selectedEquipmentId) {
    const equipment = state.equipmentMap.get(state.selectedEquipmentId);
    parts.push(`Equipment:${equipment?.name || equipment?.code || "Unknown"}`);
  }

  if (state.selectedSourceId) {
    const source = state.sources.find((s) => s.id === state.selectedSourceId);
    parts.push(`Source:${source?.name || "Unknown"}`);
  }

  dom.pointFilterSummary.textContent =
    parts.length === 0
      ? "Current filter: All points"
      : `Current filter: ${parts.join(" / ")}`;
}
|
||||
|
||||
/**
 * Drop the entire point selection: clear state, untick the header
 * select-all box and every per-row checkbox, then refresh the summary.
 */
export function clearSelectedPoints() {
  state.selectedPointIds.clear();
  dom.toggleAllPoints.checked = false;
  const rowCheckboxes = dom.pointList.querySelectorAll('input[data-point-select="true"]');
  for (const box of rowCheckboxes) {
    box.checked = false;
  }
  updateSelectedPointSummary();
}
|
||||
|
||||
/**
 * Recursively build a <details> tree element for one OPC-UA browse node,
 * with a selection checkbox and a label, and all children nested inside.
 * @param {object} node - browse node (id, display_name, browse_name,
 *   node_class, children)
 * @returns {HTMLDetailsElement}
 */
function renderNode(node) {
  const details = document.createElement("details");
  const summary = document.createElement("summary");

  if (node.children?.length) {
    summary.classList.add("has-children");
  }

  const checkbox = document.createElement("input");
  checkbox.type = "checkbox";
  checkbox.checked = state.selectedNodeIds.has(node.id);
  checkbox.addEventListener("change", () => {
    // Mirror the checkbox into the selection set, then refresh the counter.
    if (checkbox.checked) state.selectedNodeIds.add(node.id);
    else state.selectedNodeIds.delete(node.id);
    renderSelectedNodes();
  });

  const label = document.createElement("span");
  label.className = "node-label";
  label.textContent = `${node.display_name || node.browse_name} (${node.node_class})`;

  summary.append(checkbox, label);
  details.appendChild(summary);

  for (const child of node.children || []) {
    details.appendChild(renderNode(child));
  }

  return details;
}
|
||||
|
||||
/**
 * Open the point-creation modal in a clean state: preselect the current
 * source, reset the node tree placeholder and counter, and clear any
 * previous node selection.
 */
export function openPointCreateModal() {
  dom.pointModal.classList.remove("hidden");
  if (dom.pointSourceSelect) {
    dom.pointSourceSelect.value = state.selectedSourceId || "";
  }
  dom.nodeTree.innerHTML = '<div class="muted">Select a source and load nodes</div>';
  dom.pointSourceNodeCount.textContent = "Nodes: 0";
  state.selectedNodeIds.clear();
  renderSelectedNodes();
}
|
||||
|
||||
/**
 * Fetch the cached node tree for the chosen source and render it.
 * With no source selected, shows a placeholder and resets the counter.
 */
export async function loadTree() {
  const sourceId = dom.pointSourceSelect.value || state.selectedSourceId;
  if (!sourceId) {
    dom.nodeTree.innerHTML = '<div class="muted">Select a source</div>';
    dom.pointSourceNodeCount.textContent = "Nodes: 0";
    return;
  }

  state.selectedSourceId = sourceId;
  const nodes = (await apiFetch(`/api/source/${sourceId}/node-tree`)) || [];
  dom.nodeTree.innerHTML = "";
  for (const node of nodes) {
    dom.nodeTree.appendChild(renderNode(node));
  }
  updatePointSourceNodeCount();
}
|
||||
|
||||
/**
 * Trigger a fresh server-side browse of the source's address space, then
 * reload and render the resulting node tree.
 * @throws {Error} when no source is selected
 */
export async function browseAndLoadTree() {
  const sourceId = dom.pointSourceSelect.value || state.selectedSourceId;
  if (!sourceId) {
    throw new Error("Select a source first");
  }
  state.selectedSourceId = sourceId;
  await apiFetch(`/api/source/${sourceId}/browse`, { method: "POST" });
  await loadTree();
}
|
||||
|
||||
/**
 * Create points for every node currently selected in the tree, then reset
 * the selection, close the modal, and reload the point table.
 * No-op when nothing is selected.
 */
export async function createPoints() {
  const nodeIds = Array.from(state.selectedNodeIds);
  if (nodeIds.length === 0) {
    return;
  }

  await apiFetch("/api/point/batch", {
    method: "POST",
    body: JSON.stringify({ node_ids: nodeIds }),
  });

  state.selectedNodeIds.clear();
  renderSelectedNodes();
  dom.pointModal.classList.add("hidden");
  await loadPoints();
}
|
||||
|
||||
/**
 * Add or remove one point from the selection set and refresh the summary.
 * @param {*} pointId - point to toggle
 * @param {boolean} checked - desired membership
 */
function setPointSelected(pointId, checked) {
  const selection = state.selectedPointIds;
  checked ? selection.add(pointId) : selection.delete(pointId);
  updateSelectedPointSummary();
}
|
||||
|
||||
/**
 * Fetch the current page of points (honoring the source/equipment filters)
 * and rebuild the point table: one row per point with selection checkbox,
 * live value/quality cells, equipment binding info, and Edit/Delete actions.
 * Also refreshes the pager text, select-all checkbox, and the per-row
 * element cache (state.pointEls) used by the WS live-update handler.
 */
export async function loadPoints() {
  const params = new URLSearchParams({
    page: String(state.pointsPage),
    page_size: String(state.pointsPageSize),
  });
  if (state.selectedSourceId) {
    params.set("source_id", state.selectedSourceId);
  }
  if (state.selectedEquipmentId) {
    params.set("equipment_id", state.selectedEquipmentId);
  }

  const data = await apiFetch(`/api/point?${params.toString()}`);
  const items = data.data || [];
  // Fall back to the page length when the API omits a numeric total.
  state.pointsTotal = typeof data.total === "number" ? data.total : items.length;
  state.pointEls.clear();
  dom.pointList.innerHTML = "";

  if (!items.length) {
    dom.pointList.innerHTML = '<tr><td colspan="7" class="empty-state">No points</td></tr>';
    dom.pointsPageInfo.textContent = `${state.pointsPage} / 1`;
    clearSelectedPoints();
    updatePointFilterSummary();
    return;
  }

  items.forEach((item) => {
    // Items may arrive as { point, point_monitor } wrappers or as bare points.
    const point = item.point || item;
    const monitor = item.point_monitor || null;
    const equipment = point.equipment_id ? state.equipmentMap.get(point.equipment_id) : null;
    const tr = document.createElement("tr");

    // Clicking anywhere on the row opens the history chart for the point.
    tr.addEventListener("click", () => {
      openChart(point.id, point.name).catch((error) => {
        dom.statusText.textContent = error.message;
      });
    });

    // NOTE(review): point.name / node_id and equipment.name are interpolated
    // into innerHTML unescaped — acceptable for operator-configured data, but
    // should be escaped if these fields can ever contain HTML.
    tr.innerHTML = `
      <td></td>
      <td>
        <div class="point-name">${point.name}</div>
        <div class="point-id">${point.node_id}</div>
      </td>
      <td><span class="point-value">${formatValue(monitor)}</span></td>
      <td><span class="badge quality-${(monitor?.quality || "unknown").toLowerCase()}">${(monitor?.quality || "unknown").toUpperCase()}</span></td>
      <td>
        <div class="point-meta">
          <div>${equipment ? equipment.name : '<span class="muted">Unbound</span>'}</div>
          <div class="point-role">${point.signal_role || "--"}</div>
        </div>
      </td>
      <td><span class="muted">${monitor?.timestamp || "--"}</span></td>
      <td></td>
    `;

    // First cell: selection checkbox. Click propagation is stopped so
    // toggling it does not also open the chart.
    const selectCell = tr.children[0];
    const checkbox = document.createElement("input");
    checkbox.type = "checkbox";
    checkbox.dataset.pointSelect = "true";
    checkbox.checked = state.selectedPointIds.has(point.id);
    checkbox.addEventListener("click", (event) => event.stopPropagation());
    checkbox.addEventListener("change", () => setPointSelected(point.id, checkbox.checked));
    selectCell.appendChild(checkbox);

    // Last cell: per-row Edit / Delete actions.
    const actionCell = tr.lastElementChild;
    actionCell.className = "point-actions";
    const editBtn = document.createElement("button");
    editBtn.className = "secondary";
    editBtn.textContent = "Edit";
    editBtn.addEventListener("click", (event) => {
      event.stopPropagation();
      openPointBinding(point);
    });

    const deleteBtn = document.createElement("button");
    deleteBtn.className = "danger";
    deleteBtn.textContent = "Delete";
    deleteBtn.addEventListener("click", (event) => {
      event.stopPropagation();
      deletePoint(point.id).catch((error) => {
        dom.statusText.textContent = error.message;
      });
    });

    actionCell.append(editBtn, deleteBtn);
    dom.pointList.appendChild(tr);

    // Cache the live-updating elements so the WS handler can patch the
    // value/quality/time cells in place without re-rendering the table.
    state.pointEls.set(point.id, {
      row: tr,
      value: tr.querySelector(".point-value"),
      quality: tr.querySelector(".badge"),
      time: tr.querySelector("td:nth-child(6) .muted"),
    });
  });

  const totalPages = Math.max(1, Math.ceil(state.pointsTotal / state.pointsPageSize));
  dom.pointsPageInfo.textContent = `${state.pointsPage} / ${totalPages}`;
  // Header select-all reflects whether every checkbox on this page is ticked.
  const pageCheckboxes = dom.pointList.querySelectorAll('input[data-point-select="true"]');
  dom.toggleAllPoints.checked =
    pageCheckboxes.length > 0 && Array.from(pageCheckboxes).every((input) => input.checked);
  updateSelectedPointSummary();
  updatePointFilterSummary();
}
|
||||
|
||||
/**
 * Open the single-point edit modal prefilled from the given point record,
 * normalizing the modal chrome (title, button captions) to edit mode.
 * @param {object} point - point record (id, name, equipment_id, signal_role)
 */
export function openPointBinding(point) {
  dom.bindingPointId.value = point.id;
  dom.bindingPointName.value = point.name || "";
  dom.bindingPointName.disabled = false;

  const modalTitle = dom.pointBindingModal.querySelector("h3");
  if (modalTitle) modalTitle.textContent = "Edit Point";
  if (dom.clearPointBindingBtn) dom.clearPointBindingBtn.textContent = "Clear Equipment";
  const submitBtn = dom.pointBindingForm?.querySelector('button[type="submit"]');
  if (submitBtn) submitBtn.textContent = "Save";

  renderBindingEquipmentOptions(point.equipment_id || "");
  dom.bindingSignalRole.innerHTML = renderRoleOptions(point.signal_role || "");
  dom.pointBindingModal.classList.remove("hidden");
}
|
||||
|
||||
/**
 * Submit the single-point edit form: PUT the name/equipment/role (empty
 * inputs become null), close the modal, and refresh equipment + points.
 * @param {SubmitEvent} event - form submit event
 */
export async function savePointBinding(event) {
  event.preventDefault();

  const payload = {
    name: dom.bindingPointName.value.trim() || null,
    equipment_id: dom.bindingEquipmentId.value || null,
    signal_role: dom.bindingSignalRole.value || null,
  };
  await apiFetch(`/api/point/${dom.bindingPointId.value}`, {
    method: "PUT",
    body: JSON.stringify(payload),
  });

  dom.pointBindingModal.classList.add("hidden");
  await loadEquipments();
  await loadPoints();
}
|
||||
|
||||
/**
 * Open the batch-binding modal for the current point selection.
 * Does nothing when no points are selected.
 */
export function openBatchBinding() {
  if (state.selectedPointIds.size === 0) {
    return;
  }
  renderBatchBindingDefaults();
  updateSelectedPointSummary();
  dom.batchBindingModal.classList.remove("hidden");
}
|
||||
|
||||
/**
 * Apply the chosen equipment/role to every selected point (empty selects
 * become null), then close the modal, clear the selection and refresh
 * equipment + points. No-op when nothing is selected.
 * @param {SubmitEvent} event - form submit event
 */
export async function saveBatchBinding(event) {
  event.preventDefault();

  if (state.selectedPointIds.size === 0) {
    return;
  }

  const payload = {
    point_ids: Array.from(state.selectedPointIds),
    equipment_id: dom.batchBindingEquipmentId.value || null,
    signal_role: dom.batchBindingSignalRole.value || null,
  };
  await apiFetch("/api/point/batch/set-equipment", {
    method: "PUT",
    body: JSON.stringify(payload),
  });

  dom.batchBindingModal.classList.add("hidden");
  clearSelectedPoints();
  await loadEquipments();
  await loadPoints();
}
|
||||
|
||||
/**
 * Unbind every selected point (sets equipment and role to null), then close
 * the modal, clear the selection and refresh equipment + points.
 * No-op when nothing is selected.
 */
export async function clearBatchBinding() {
  if (state.selectedPointIds.size === 0) {
    return;
  }

  const payload = {
    point_ids: Array.from(state.selectedPointIds),
    equipment_id: null,
    signal_role: null,
  };
  await apiFetch("/api/point/batch/set-equipment", {
    method: "PUT",
    body: JSON.stringify(payload),
  });

  dom.batchBindingModal.classList.add("hidden");
  clearSelectedPoints();
  await loadEquipments();
  await loadPoints();
}
|
||||
|
||||
/**
 * Delete one point after user confirmation, drop it from the selection,
 * and reload the point table. Cancelling the confirm dialog is a no-op.
 * @param {*} pointId - id of the point to delete
 */
export async function deletePoint(pointId) {
  const confirmed = window.confirm("Delete this point?");
  if (!confirmed) {
    return;
  }

  await apiFetch(`/api/point/${pointId}`, { method: "DELETE" });
  state.selectedPointIds.delete(pointId);
  await loadPoints();
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue