fix(logs): follow rotated log files in stream
This commit is contained in:
parent
9f833f3a5e
commit
68b4eec610
|
|
@ -46,6 +46,13 @@ pub struct LogChunkResponse {
|
||||||
pub reset: bool,
|
pub reset: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Tracks which log file the SSE stream is currently tailing and how far
/// into it the stream has read.
#[derive(Debug, Clone, PartialEq, Eq)]
struct StreamFileState {
    /// Filesystem path of the log file being followed.
    path: PathBuf,
    /// Base name of `path` (as produced by `file_name_of`); passed to
    /// `read_since` and surfaced to clients so they can detect file switches.
    file_name: String,
    /// Byte offset at which the next `read_since` call resumes; reset to 0
    /// when the stream moves to a newer file.
    cursor: u64,
}
|
||||||
|
|
||||||
pub async fn get_logs(Query(query): Query<LogQuery>) -> Result<impl IntoResponse, ApiErr> {
|
pub async fn get_logs(Query(query): Query<LogQuery>) -> Result<impl IntoResponse, ApiErr> {
|
||||||
let path = resolve_log_file(query.file.as_deref()).await?;
|
let path = resolve_log_file(query.file.as_deref()).await?;
|
||||||
let file_name = file_name_of(&path);
|
let file_name = file_name_of(&path);
|
||||||
|
|
@ -74,17 +81,44 @@ pub async fn stream_logs(Query(query): Query<LogQuery>) -> Result<impl IntoRespo
|
||||||
.max_bytes
|
.max_bytes
|
||||||
.unwrap_or(STREAM_MAX_BYTES)
|
.unwrap_or(STREAM_MAX_BYTES)
|
||||||
.clamp(1, MAX_MAX_BYTES);
|
.clamp(1, MAX_MAX_BYTES);
|
||||||
|
let follow_latest = query.file.is_none();
|
||||||
let start_cursor = query.cursor.unwrap_or(file_len(&path).await?);
|
let start_cursor = query.cursor.unwrap_or(file_len(&path).await?);
|
||||||
|
|
||||||
let event_stream = stream! {
|
let event_stream = stream! {
|
||||||
let mut ticker = interval(Duration::from_millis(800));
|
let mut ticker = interval(Duration::from_millis(800));
|
||||||
let mut cursor = start_cursor;
|
let mut stream_file = StreamFileState {
|
||||||
|
path,
|
||||||
|
file_name,
|
||||||
|
cursor: start_cursor,
|
||||||
|
};
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
ticker.tick().await;
|
ticker.tick().await;
|
||||||
match read_since(&path, &file_name, cursor, max_bytes).await {
|
let switched = if follow_latest {
|
||||||
|
match latest_log_file(Path::new(LOG_DIR)).await {
|
||||||
|
Ok(latest_path) => {
|
||||||
|
let latest = StreamFileState {
|
||||||
|
file_name: file_name_of(&latest_path),
|
||||||
|
path: latest_path,
|
||||||
|
cursor: 0,
|
||||||
|
};
|
||||||
|
let (next, switched) = advance_stream_file(&stream_file, &latest);
|
||||||
|
stream_file = next;
|
||||||
|
switched
|
||||||
|
}
|
||||||
|
Err(_) => false,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
};
|
||||||
|
|
||||||
|
match read_since(&stream_file.path, &stream_file.file_name, stream_file.cursor, max_bytes).await {
|
||||||
Ok(chunk) => {
|
Ok(chunk) => {
|
||||||
cursor = chunk.cursor;
|
stream_file.cursor = chunk.cursor;
|
||||||
|
let chunk = LogChunkResponse {
|
||||||
|
reset: chunk.reset || switched,
|
||||||
|
..chunk
|
||||||
|
};
|
||||||
if chunk.reset || !chunk.lines.is_empty() {
|
if chunk.reset || !chunk.lines.is_empty() {
|
||||||
match Event::default().event("log").json_data(&chunk) {
|
match Event::default().event("log").json_data(&chunk) {
|
||||||
Ok(event) => yield Ok::<Event, Infallible>(event),
|
Ok(event) => yield Ok::<Event, Infallible>(event),
|
||||||
|
|
@ -267,9 +301,54 @@ fn file_name_of(path: &Path) -> String {
|
||||||
.to_string()
|
.to_string()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn advance_stream_file(
|
||||||
|
current: &StreamFileState,
|
||||||
|
latest: &StreamFileState,
|
||||||
|
) -> (StreamFileState, bool) {
|
||||||
|
if current.path == latest.path {
|
||||||
|
return (current.clone(), false);
|
||||||
|
}
|
||||||
|
|
||||||
|
(
|
||||||
|
StreamFileState {
|
||||||
|
path: latest.path.clone(),
|
||||||
|
file_name: latest.file_name.clone(),
|
||||||
|
cursor: 0,
|
||||||
|
},
|
||||||
|
true,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
fn map_open_err(err: std::io::Error) -> ApiErr {
|
fn map_open_err(err: std::io::Error) -> ApiErr {
|
||||||
match err.kind() {
|
match err.kind() {
|
||||||
std::io::ErrorKind::NotFound => ApiErr::NotFound("log file not found".to_string(), None),
|
std::io::ErrorKind::NotFound => ApiErr::NotFound("log file not found".to_string(), None),
|
||||||
_ => ApiErr::Internal("failed to access log file".to_string(), None),
|
_ => ApiErr::Internal("failed to access log file".to_string(), None),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::{advance_stream_file, StreamFileState};
    use std::path::PathBuf;

    /// Build a `StreamFileState` for a log file named `name` under `logs/`.
    fn state(name: &str, cursor: u64) -> StreamFileState {
        StreamFileState {
            path: PathBuf::from(format!("logs/{name}")),
            file_name: name.to_string(),
            cursor,
        }
    }

    #[test]
    fn advance_stream_file_switches_to_latest_file_and_resets_cursor() {
        let current = state("app.log", 128);
        let latest = state("app.log.1", 42);

        // Fixed mojibake in the original call site: `¤t` (an HTML-entity
        // corruption of `&current`) is restored to a valid borrow.
        let (next, switched) = advance_stream_file(&current, &latest);

        assert!(switched);
        assert_eq!(next.path, latest.path);
        assert_eq!(next.file_name, latest.file_name);
        // The cursor must restart at the beginning of the new file, not
        // carry over `latest.cursor`.
        assert_eq!(next.cursor, 0);
    }

    #[test]
    fn advance_stream_file_keeps_state_when_path_is_unchanged() {
        let current = state("app.log", 128);
        let same = current.clone();

        let (next, switched) = advance_stream_file(&current, &same);

        assert!(!switched);
        assert_eq!(next, current);
    }
}
|
||||||
|
|
|
||||||
|
|
@ -40,11 +40,26 @@ function appendLog(line) {
|
||||||
if (atBottom) dom.logView.scrollTop = dom.logView.scrollHeight;
|
if (atBottom) dom.logView.scrollTop = dom.logView.scrollHeight;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Insert a muted divider line (e.g. a "log switched" notice) into the log
// view. If the user was scrolled to (or near) the bottom before the insert,
// keep following the tail; otherwise leave their scroll position alone.
function appendLogDivider(text) {
  const view = dom.logView;
  if (!view) return;
  const wasAtBottom = view.scrollTop + view.clientHeight >= view.scrollHeight - 10;
  const divider = document.createElement("div");
  divider.className = "log-line muted";
  divider.textContent = text;
  view.appendChild(divider);
  if (wasAtBottom) view.scrollTop = view.scrollHeight;
}
|
||||||
|
|
||||||
export function startLogs() {
|
export function startLogs() {
|
||||||
if (state.logSource) return;
|
if (state.logSource) return;
|
||||||
|
let currentLogFile = null;
|
||||||
state.logSource = new EventSource("/api/logs/stream");
|
state.logSource = new EventSource("/api/logs/stream");
|
||||||
state.logSource.addEventListener("log", (event) => {
|
state.logSource.addEventListener("log", (event) => {
|
||||||
const data = JSON.parse(event.data);
|
const data = JSON.parse(event.data);
|
||||||
|
if (data.reset && data.file && data.file !== currentLogFile) {
|
||||||
|
appendLogDivider(`[log switched to ${data.file}]`);
|
||||||
|
}
|
||||||
|
currentLogFile = data.file || currentLogFile;
|
||||||
(data.lines || []).forEach(appendLog);
|
(data.lines || []).forEach(appendLog);
|
||||||
});
|
});
|
||||||
state.logSource.addEventListener("error", () => appendLog("[log stream error]"));
|
state.logSource.addEventListener("error", () => appendLog("[log stream error]"));
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue