Mirror of https://github.com/clash-verge-rev/clash-verge-rev.git (synced 2026-01-29 17:02:22 +08:00)
refactor: simplify log retrieval by removing level parameter and relying on server-side filtering #4293
@@ -580,8 +580,8 @@ pub async fn clash_gc() -> CmdResult {
 
 /// Get logs (uses the new streaming implementation)
 #[tauri::command]
-pub async fn get_clash_logs(level: Option<String>) -> CmdResult<serde_json::Value> {
-    Ok(ipc::get_logs_json(level).await)
+pub async fn get_clash_logs() -> CmdResult<serde_json::Value> {
+    Ok(ipc::get_logs_json().await)
 }
 
 /// Start log monitoring
@@ -25,11 +25,11 @@ pub struct LogItem {
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 enum LogLevel {
-    Debug,
-    Info,
-    Warning,
-    Error,
-    All,
+    All = 0,     // Show all levels (used for filtering only)
+    Debug = 1,   // Debug log type / Debug filter level (shows debug, warning, error; excludes info)
+    Info = 2,    // Info log type / Info filter level (shows info, warning, error)
+    Warning = 3, // Warning log type / Warning filter level (shows warning, error)
+    Error = 4,   // Error log type / Error filter level (shows only error)
 }
 
 impl fmt::Display for LogLevel {
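Note: because `PartialOrd`/`Ord` are derived, the variants compare according to these explicit discriminants (which also match declaration order), so `All < Debug < Info < Warning < Error`. A minimal standalone sketch, not project code, illustrating the derived ordering:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum LogLevel {
    All = 0,
    Debug = 1,
    Info = 2,
    Warning = 3,
    Error = 4,
}

fn main() {
    // The derived comparison follows the discriminant values assigned above.
    assert!(LogLevel::All < LogLevel::Debug);
    assert!(LogLevel::Warning >= LogLevel::Info);
    assert_eq!(LogLevel::Error as u8, 4);
    println!("ordering follows the explicit discriminants");
}
```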
@@ -54,7 +54,7 @@ impl TryFrom<&str> for LogLevel {
             "warning" | "warn" => Ok(LogLevel::Warning),
             "error" | "err" => Ok(LogLevel::Error),
             "all" => Ok(LogLevel::All),
-            _ => Err(format!("Invalid log level: '{}'", value)),
+            _ => Err(format!("Invalid log level: '{value}'")),
         }
     }
 }
@@ -67,29 +67,7 @@ impl TryFrom<String> for LogLevel {
     }
 }
 
-impl LogLevel {
-    /// Parse from string with a default fallback
-    pub fn from_str_or_default(s: &str, default: LogLevel) -> LogLevel {
-        Self::try_from(s).unwrap_or(default)
-    }
-
-    /// Check if this log level should include logs of the specified type
-    pub fn should_include(&self, log_type: &str) -> bool {
-        match LogLevel::try_from(log_type) {
-            Ok(log_level) => match self {
-                LogLevel::All => true,
-                LogLevel::Debug => true, // Debug includes all levels
-                LogLevel::Info => log_level >= LogLevel::Info,
-                LogLevel::Warning => log_level >= LogLevel::Warning,
-                LogLevel::Error => log_level >= LogLevel::Error,
-            },
-            Err(_) => {
-                // If we can't parse the log type, include it by default
-                true
-            }
-        }
-    }
-}
+impl LogLevel {}
 
 impl LogItem {
     fn new(log_type: String, payload: String) -> Self {
@@ -166,7 +144,7 @@ impl LogsMonitor {
         let filter_level = level.clone().unwrap_or_else(|| "info".to_string());
 
         // Check if we're already monitoring the same level
-        {
+        let level_changed = {
            let current_level = self.current_monitoring_level.read().await;
            if let Some(existing_level) = current_level.as_ref() {
                if existing_level == &filter_level {
@@ -178,9 +156,13 @@ impl LogsMonitor {
                         filter_level
                     );
                     return;
+                } else {
+                    true // Level changed
                 }
+            } else {
+                true // First time or was stopped
             }
-        }
+        };
 
         // Stop existing monitoring task if level changed or first time
         {
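Note: turning the bare block into `let level_changed = { ... }` makes it an expression that yields a bool, and it keeps the `read()` guard on `current_monitoring_level` scoped to that block, so the guard is released before the same `RwLock` is taken for writing further down in the function. A minimal sketch of that pattern under those assumptions (simplified: no early return, hypothetical local names; needs tokio with the `macros`, `rt-multi-thread`, and `sync` features):

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

#[tokio::main]
async fn main() {
    let current_level: Arc<RwLock<Option<String>>> = Arc::new(RwLock::new(None));
    let filter_level = "info".to_string();

    // The read guard lives only inside this block expression; it is dropped
    // before the write lock below is requested, so the writer never waits on
    // our own still-held reader.
    let level_changed = {
        let guard = current_level.read().await;
        match guard.as_ref() {
            Some(existing) => existing != &filter_level,
            None => true, // first time or monitoring was stopped
        }
    };

    if level_changed {
        *current_level.write().await = Some(filter_level);
    }
    println!("level_changed = {level_changed}");
}
```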
@@ -196,6 +178,21 @@ impl LogsMonitor {
             }
         }
 
+        // Clear logs cache when level changes to ensure fresh data
+        if level_changed {
+            let mut current = self.current.write().await;
+            current.logs.clear();
+            current.level = filter_level.clone();
+            current.mark_fresh();
+            logging!(
+                info,
+                Type::Ipc,
+                true,
+                "LogsMonitor: Cleared logs cache due to level change to '{}'",
+                filter_level
+            );
+        }
+
        // Update current monitoring level
        {
            let mut current_level = self.current_monitoring_level.write().await;
@@ -204,12 +201,6 @@ impl LogsMonitor {
 
         let monitor_current = self.current.clone();
 
-        // Update current level in data structure
-        {
-            let mut current = monitor_current.write().await;
-            current.level = filter_level.clone();
-        }
-
         let task = tokio::spawn(async move {
             loop {
                 // Get fresh IPC path and client for each connection attempt
@@ -222,7 +213,11 @@ impl LogsMonitor {
                     }
                 };
 
-                let url = "/logs";
+                let url = if filter_level == "all" {
+                    "/logs".to_string()
+                } else {
+                    format!("/logs?level={filter_level}")
+                };
 
                 logging!(
                     info,
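Note: this is where the filtering responsibility moves, as the commit message states: the level is pushed into the `/logs` request as a query parameter and the core filters the stream server-side, instead of the client filtering each line. A minimal sketch of the URL selection, written as a hypothetical free function (`logs_url` does not exist in the codebase):

```rust
fn logs_url(filter_level: &str) -> String {
    if filter_level == "all" {
        // No parameter: the endpoint streams every level.
        "/logs".to_string()
    } else {
        // With a level parameter the endpoint filters server-side.
        format!("/logs?level={filter_level}")
    }
}

fn main() {
    assert_eq!(logs_url("all"), "/logs");
    assert_eq!(logs_url("info"), "/logs?level=info");
    assert_eq!(logs_url("warning"), "/logs?level=warning");
}
```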
@@ -233,11 +228,9 @@ impl LogsMonitor {
                 );
 
                 let _ = client
-                    .get(url)
+                    .get(&url)
                     .timeout(Duration::from_secs(30))
-                    .process_lines(|line| {
-                        Self::process_log_line(line, &filter_level, monitor_current.clone())
-                    })
+                    .process_lines(|line| Self::process_log_line(line, monitor_current.clone()))
                     .await;
 
                 // Wait before retrying
@@ -296,31 +289,26 @@ impl LogsMonitor {
 
     fn process_log_line(
         line: &str,
-        filter_level: &str,
         current: Arc<RwLock<CurrentLogs>>,
     ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
         if let Ok(log_data) = serde_json::from_str::<LogData>(line.trim()) {
-            // Use LogLevel enum for smarter filtering with hierarchical support
-            let filter_log_level = LogLevel::from_str_or_default(filter_level, LogLevel::Info);
-            let should_include = filter_log_level.should_include(&log_data.log_type);
+            // Server-side filtering via query parameters handles the level filtering
+            // We only need to accept all logs since filtering is done at the endpoint level
+            let log_item = LogItem::new(log_data.log_type, log_data.payload);
 
-            if should_include {
-                let log_item = LogItem::new(log_data.log_type, log_data.payload);
+            tokio::spawn(async move {
+                let mut logs = current.write().await;
 
-                tokio::spawn(async move {
-                    let mut logs = current.write().await;
+                // Add new log
+                logs.logs.push_back(log_item);
 
-                    // Add new log
-                    logs.logs.push_back(log_item);
+                // Keep only the last 1000 logs
+                if logs.logs.len() > 1000 {
+                    logs.logs.pop_front();
+                }
 
-                    // Keep only the last 1000 logs
-                    if logs.logs.len() > 1000 {
-                        logs.logs.pop_front();
-                    }
-
-                    logs.mark_fresh();
-                });
-            }
+                logs.mark_fresh();
+            });
         }
         Ok(())
     }
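Note: `process_log_line` keeps the in-memory cache bounded: push the new entry to the back of the `VecDeque`, then drop the oldest entry from the front once the cap of 1000 is exceeded. A standalone sketch of that buffer discipline (the `push_capped` helper and `MAX_LOGS` constant are hypothetical, for illustration only):

```rust
use std::collections::VecDeque;

const MAX_LOGS: usize = 1000;

fn push_capped(buf: &mut VecDeque<String>, item: String) {
    buf.push_back(item);
    // Evict the oldest entry so the buffer never grows past MAX_LOGS.
    if buf.len() > MAX_LOGS {
        buf.pop_front();
    }
}

fn main() {
    let mut buf = VecDeque::new();
    for i in 0..1005 {
        push_capped(&mut buf, format!("log line {i}"));
    }
    assert_eq!(buf.len(), MAX_LOGS);
    // The five oldest lines (0..=4) were evicted.
    assert_eq!(buf.front().map(String::as_str), Some("log line 5"));
}
```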
@@ -341,19 +329,14 @@ impl LogsMonitor {
         );
     }
 
-    pub async fn get_logs_as_json(&self, level: Option<String>) -> serde_json::Value {
+    pub async fn get_logs_as_json(&self) -> serde_json::Value {
         let current = self.current().await;
 
-        // Use the same filtering logic as process_log_line for consistency
-        let filter_log_level = level
-            .as_deref()
-            .map(|l| LogLevel::from_str_or_default(l, LogLevel::Info))
-            .unwrap_or(LogLevel::All);
-
-        let filtered_logs: Vec<serde_json::Value> = current
+        // Simply return all cached logs since filtering is handled by start_monitoring
+        // and the cache is cleared when level changes
+        let logs: Vec<serde_json::Value> = current
             .logs
             .iter()
-            .filter(|log| filter_log_level.should_include(&log.log_type))
             .map(|log| {
                 serde_json::json!({
                     "type": log.log_type,
@@ -363,7 +346,7 @@ impl LogsMonitor {
             })
             .collect();
 
-        serde_json::Value::Array(filtered_logs)
+        serde_json::Value::Array(logs)
     }
 }
 
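Note: `get_logs_as_json` now just maps the cached items into JSON objects and wraps them in an array. A simplified, self-contained sketch of that mapping (this redefines a stripped-down `LogItem` locally; the field names `type`, `time`, and `payload` follow what the frontend reads from each entry):

```rust
use serde_json::{json, Value};

struct LogItem {
    log_type: String,
    time: String,
    payload: String,
}

fn to_json_array(items: &[LogItem]) -> Value {
    let logs: Vec<Value> = items
        .iter()
        .map(|log| {
            json!({
                "type": log.log_type,
                "time": log.time,
                "payload": log.payload,
            })
        })
        .collect();
    Value::Array(logs)
}

fn main() {
    let items = vec![LogItem {
        log_type: "info".into(),
        time: "2024-01-01T00:00:00".into(),
        payload: "hello".into(),
    }];
    println!("{}", to_json_array(&items));
}
```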
@@ -379,6 +362,6 @@ pub async fn clear_logs() {
     LogsMonitor::global().clear_logs().await;
 }
 
-pub async fn get_logs_json(level: Option<String>) -> serde_json::Value {
-    LogsMonitor::global().get_logs_as_json(level).await
+pub async fn get_logs_json() -> serde_json::Value {
+    LogsMonitor::global().get_logs_as_json().await
 }
@@ -426,7 +426,9 @@ pub fn run() {
     {
         std::env::set_var("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
 
-        let desktop_env = std::env::var("XDG_CURRENT_DESKTOP").unwrap_or_default().to_uppercase();
+        let desktop_env = std::env::var("XDG_CURRENT_DESKTOP")
+            .unwrap_or_default()
+            .to_uppercase();
         let is_kde_desktop = desktop_env.contains("KDE");
         let is_plasma_desktop = desktop_env.contains("PLASMA");
 
@@ -24,15 +24,7 @@ import {
   toggleLogEnabled,
 } from "@/services/global-log-service";
 
-// Log level hierarchy - kept consistent with the backend
-// Backend order: Debug < Info < Warning < Error; All shows everything
-const LOG_LEVEL_HIERARCHY = {
-  all: ["debug", "info", "warning", "error"], // All: show every level
-  debug: ["debug", "info", "warning", "error"], // Debug: show every level (lowest)
-  info: ["info", "warning", "error"], // Info: show Info, Warning, Error
-  warning: ["warning", "error"], // Warning: show Warning, Error
-  error: ["error"], // Error: show Error only
-};
+// The backend now filters via /logs?level={level}; the frontend no longer filters log levels manually
 
 const LogPage = () => {
   const { t } = useTranslation();
@@ -53,22 +45,18 @@ const LogPage = () => {
       return [];
     }
 
-    const allowedTypes = LOG_LEVEL_HIERARCHY[logLevel] || [];
-
+    // Server-side filtering handles level filtering via query parameters
+    // We only need to apply search filtering here
     return logData.filter((data) => {
-      const logType = data.type?.toLowerCase() || "";
-      const isAllowedType =
-        logLevel === "all" || allowedTypes.includes(logType);
-
       // Build the full search text, including time, type, and payload
       const searchText =
         `${data.time || ""} ${data.type} ${data.payload}`.toLowerCase();
 
       const matchesSearch = match(searchText);
 
-      return isAllowedType && matchesSearch;
+      return matchesSearch;
     });
-  }, [logData, logLevel, match]);
+  }, [logData, match]);
 
   const handleLogLevelChange = (newLevel: LogLevel) => {
     setLogLevel(newLevel);
@@ -105,17 +93,15 @@ const LogPage = () => {
             )}
           </IconButton>
 
-          {enableLog === true && (
-            <Button
-              size="small"
-              variant="contained"
-              onClick={() => {
-                clearGlobalLogs();
-              }}
-            >
-              {t("Clear")}
-            </Button>
-          )}
+          <Button
+            size="small"
+            variant="contained"
+            onClick={() => {
+              clearGlobalLogs();
+            }}
+          >
+            {t("Clear")}
+          </Button>
         </Box>
       }
     >
@@ -412,8 +412,8 @@ export async function gc() {
   return invoke<void>("clash_gc");
 }
 
-export async function getClashLogs(level?: string) {
-  return invoke<any>("get_clash_logs", { level });
+export async function getClashLogs() {
+  return invoke<any>("get_clash_logs");
 }
 
 export async function startLogsMonitoring(level?: string) {
@@ -44,8 +44,9 @@ export const fetchLogsViaIPC = async (
   logLevel: LogLevel = "info",
 ): Promise<ILogItem[]> => {
   try {
-    const level = logLevel === "all" ? undefined : logLevel;
-    const response = await getClashLogs(level);
+    // Server-side filtering handles the level via /logs?level={level}
+    // We just fetch all cached logs regardless of the logLevel parameter
+    const response = await getClashLogs();
 
     // The response should be in the format expected by the frontend
     // Transform the logs to match the expected format