Mirror of https://github.com/clash-verge-rev/clash-verge-rev.git
refactor: add debounce to optimize config updates and provider refresh handling
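The change has two parts: in patch_profiles_config, tray updates and the profiles save are moved into a spawned background task so the command returns as soon as the new config is applied and Clash is refreshed; in get_providers_proxies, cached provider data is returned immediately while a background refresh is debounced behind a shared timestamp and an in-flight flag. Below is a minimal, std-only sketch of that debounce guard; the diff itself uses once_cell::sync::Lazy plus parking_lot::Mutex and a separate load/store of the flag, so the compare_exchange here is an illustrative variation, not the project's exact code.

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Mutex;
use std::time::{Duration, Instant};

// Same roles as the statics introduced in the diff, but with std-only types.
static LAST_REFRESH_TIME: Mutex<Option<Instant>> = Mutex::new(None);
static IS_REFRESHING: AtomicBool = AtomicBool::new(false);
const REFRESH_INTERVAL: Duration = Duration::from_secs(3);

/// True when a background refresh should start: the last one is older than
/// REFRESH_INTERVAL (or never ran) and no refresh is currently in flight.
fn should_start_refresh() -> bool {
    let due = match *LAST_REFRESH_TIME.lock().unwrap() {
        Some(last) => last.elapsed() > REFRESH_INTERVAL,
        None => true,
    };
    // Claim the in-flight slot atomically; only one caller wins.
    due && IS_REFRESHING
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}

/// Called by the background task once the refresh finishes.
fn finish_refresh() {
    *LAST_REFRESH_TIME.lock().unwrap() = Some(Instant::now());
    IS_REFRESHING.store(false, Ordering::Release);
}

fn main() {
    assert!(should_start_refresh()); // first call: no previous refresh, flag is free
    assert!(!should_start_refresh()); // a refresh is already in flight
    finish_refresh();
    assert!(!should_start_refresh()); // still inside the 3-second window
}

The compare_exchange closes the small window in which two callers could both observe the flag as false; the diff's separate load/store accepts that race, which is harmless here since a duplicate refresh only costs one extra request.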
@@ -10,7 +10,6 @@ use crate::{
 /// Get the list of profiles
 #[tauri::command]
 pub fn get_profiles() -> CmdResult<IProfiles> {
-    let _ = Tray::global().update_menu();
     Ok(Config::profiles().data().clone())
 }

@@ -154,11 +153,25 @@ pub async fn patch_profiles_config(profiles: IProfiles) -> CmdResult<bool> {
     match CoreManager::global().update_config().await {
         Ok((true, _)) => {
             logging!(info, Type::Cmd, true, "Configuration updated successfully");
-            handle::Handle::refresh_clash();
-            let _ = Tray::global().update_tooltip();
             Config::profiles().apply();
-            wrap_err!(Config::profiles().data().save_file())?;
+            handle::Handle::refresh_clash();
+
+            crate::process::AsyncHandler::spawn(|| async move {
+                if let Err(e) = Tray::global().update_tooltip() {
+                    log::warn!(target: "app", "Failed to update tray tooltip asynchronously: {}", e);
+                }
+
+                if let Err(e) = Tray::global().update_menu() {
+                    log::warn!(target: "app", "Failed to update tray menu asynchronously: {}", e);
+                }
+
+                // Save the profiles file
+                if let Err(e) = Config::profiles().data().save_file() {
+                    log::warn!(target: "app", "Failed to save profiles asynchronously: {}", e);
+                }
+            });
+
             // Notify the frontend of the configuration change immediately
             if let Some(current) = &current_value {
                 logging!(info, Type::Cmd, true, "Sending configuration change event to frontend: {}", current);
                 handle::Handle::notify_profile_changed(current.clone());
@@ -185,7 +198,13 @@ pub async fn patch_profiles_config(profiles: IProfiles) -> CmdResult<bool> {
             // Restore silently without triggering validation
             wrap_err!({ Config::profiles().draft().patch_config(restore_profiles) })?;
             Config::profiles().apply();
             wrap_err!(Config::profiles().data().save_file())?;

+            crate::process::AsyncHandler::spawn(|| async move {
+                if let Err(e) = Config::profiles().data().save_file() {
+                    log::warn!(target: "app", "Failed to save restored profiles asynchronously: {}", e);
+                }
+            });
+
             logging!(info, Type::Cmd, true, "Successfully restored the previous configuration");
         }
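Both branches above push the slow follow-up work (tray tooltip and menu updates, writing the profiles file) into a fire-and-forget task via crate::process::AsyncHandler::spawn, so the Tauri command can return without waiting on it. A rough sketch of that shape, assuming a Tokio runtime (tokio with the rt, macros, and time features) and using tokio::spawn as a stand-in for the project-specific AsyncHandler::spawn; save_profiles is a hypothetical placeholder for Config::profiles().data().save_file():

use std::time::Duration;

// Hypothetical placeholder for the real file-save call.
fn save_profiles() -> Result<(), String> {
    Ok(())
}

#[tokio::main]
async fn main() {
    // Do the cheap, user-visible work first, then return quickly;
    // the slow bookkeeping happens in the background.
    tokio::spawn(async {
        if let Err(e) = save_profiles() {
            eprintln!("async profiles save failed: {e}");
        }
    });

    // A real command handler would return here; this demo just waits briefly
    // so the spawned task gets a chance to run before the process exits.
    tokio::time::sleep(Duration::from_millis(50)).await;
}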
@@ -1,24 +1,95 @@
 use super::CmdResult;
-use crate::module::mihomo::MihomoManager;
+use crate::{core::handle, module::mihomo::MihomoManager};
+use once_cell::sync::Lazy;
+use parking_lot::Mutex;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::time::{Duration, Instant};

+static LAST_REFRESH_TIME: Lazy<Mutex<Option<Instant>>> = Lazy::new(|| Mutex::new(None));
+static LAST_EVENT_TIME: Lazy<Mutex<Option<Instant>>> = Lazy::new(|| Mutex::new(None));
+static IS_REFRESHING: AtomicBool = AtomicBool::new(false);
+const REFRESH_INTERVAL: Duration = Duration::from_secs(3);
+const EVENT_INTERVAL: Duration = Duration::from_secs(1);
+
 #[tauri::command]
 pub async fn get_proxies() -> CmdResult<serde_json::Value> {
-    let mannager = MihomoManager::global();
+    let manager = MihomoManager::global();

-    mannager
+    manager
         .refresh_proxies()
         .await
-        .map(|_| mannager.get_proxies())
-        .or_else(|_| Ok(mannager.get_proxies()))
+        .map(|_| manager.get_proxies())
+        .or_else(|_| Ok(manager.get_proxies()))
 }

 #[tauri::command]
 pub async fn get_providers_proxies() -> CmdResult<serde_json::Value> {
-    let mannager = MihomoManager::global();
+    let manager = MihomoManager::global();
+    let cached_data = manager.get_providers_proxies();

-    mannager
-        .refresh_providers_proxies()
-        .await
-        .map(|_| mannager.get_providers_proxies())
-        .or_else(|_| Ok(mannager.get_providers_proxies()))
+    let safe_data = if cached_data.is_null() {
+        serde_json::json!({
+            "providers": {}
+        })
+    } else {
+        cached_data
+    };
+
+    // Check whether a refresh is needed
+    let should_refresh = {
+        let last_refresh = LAST_REFRESH_TIME.lock();
+        match *last_refresh {
+            Some(last_time) => last_time.elapsed() > REFRESH_INTERVAL,
+            None => true,
+        }
+    };
+
+    if should_refresh && !IS_REFRESHING.load(Ordering::Acquire) {
+        IS_REFRESHING.store(true, Ordering::Release);
+
+        crate::process::AsyncHandler::spawn(|| async move {
+            let manager = MihomoManager::global();
+            match manager.refresh_providers_proxies().await {
+                Ok(_) => {
+                    log::debug!(target: "app", "providers_proxies background refresh succeeded");
+
+                    let should_send_event = {
+                        let mut last_event = LAST_EVENT_TIME.lock();
+                        match *last_event {
+                            Some(last_time) => {
+                                if last_time.elapsed() > EVENT_INTERVAL {
+                                    *last_event = Some(Instant::now());
+                                    true
+                                } else {
+                                    false
+                                }
+                            }
+                            None => {
+                                *last_event = Some(Instant::now());
+                                true
+                            }
+                        }
+                    };
+                    if should_send_event {
+                        handle::Handle::refresh_providers_proxies();
+                        log::debug!(target: "app", "providers_proxies refresh event sent");
+                    } else {
+                        log::debug!(target: "app", "skipped providers_proxies event (rate limited)");
+                    }
+                }
+                Err(e) => {
+                    log::warn!(target: "app", "providers_proxies background refresh failed: {}", e);
+                }
+            }
+
+            {
+                let mut last_refresh = LAST_REFRESH_TIME.lock();
+                *last_refresh = Some(Instant::now());
+            }
+
+            IS_REFRESHING.store(false, Ordering::Release);
+        });
+    }
+
+    Ok(safe_data)
 }
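Besides debouncing the refresh itself, the spawned task rate-limits the refresh_providers_proxies event pushed to the frontend via LAST_EVENT_TIME and EVENT_INTERVAL. The check-and-update step reduces to the following std-only sketch; the diff wraps the state in once_cell::sync::Lazy and parking_lot::Mutex, but the behavior is the same.

use std::sync::Mutex;
use std::time::{Duration, Instant};

static LAST_EVENT_TIME: Mutex<Option<Instant>> = Mutex::new(None);
const EVENT_INTERVAL: Duration = Duration::from_secs(1);

/// Returns true at most once per EVENT_INTERVAL, updating the timestamp
/// whenever it lets an event through.
fn should_send_event() -> bool {
    let mut last_event = LAST_EVENT_TIME.lock().unwrap();
    match *last_event {
        Some(last) if last.elapsed() <= EVENT_INTERVAL => false,
        _ => {
            *last_event = Some(Instant::now());
            true
        }
    }
}

fn main() {
    assert!(should_send_event()); // first event always goes through
    assert!(!should_send_event()); // immediate retry is suppressed
    std::thread::sleep(Duration::from_millis(1100));
    assert!(should_send_event()); // allowed again once the interval has passed
}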