mirror of
https://github.com/clash-verge-rev/clash-verge-rev.git
synced 2026-01-29 17:15:38 +08:00
refactor: clash-verge-service management (#4674)
* refactor: clash-verge-service management
* fix: correct service state checks in ProxyControlSwitches component
* refactor: improve logging in service state update functions
* fix: add missing async handler for Windows and adjust logging import for macOS
* fix: streamline logging imports and add missing async handler for Windows
* refactor: remove unused useServiceStateSync hook and update imports in _layout
* refactor: remove unused useServiceStateSync import and clean up code in ProxyControlSwitches and _layout
* refactor: simplify service status checks and reduce wait time in useServiceInstaller hook
* refactor: remove unnecessary logging statements in service checks and IPC connection
* refactor: extract SwitchRow component for better code organization and readability
* refactor: enhance service state management and update related mutations in layout
* refactor: streamline core stopping logic and improve IPC connection logging
* refactor: consolidate service uninstallation logic and improve error handling
* fix: simplify conditional statements in CoreManager and service functions
* feat: add backoff dependency and implement retry strategy for IPC requests (see the retry sketch after this list)
* refactor: remove redundant Windows conditional and improve error handling in IPC tests
* test: improve error handling in IPC tests for message signing and verification
* fix: adjust IPC backoff retry parameters
* refactor: Remove service state tracking and related logic from service management
* feat: Enhance service status handling with logging and running mode updates
* fix: Improve service status handling with enhanced error logging
* fix: Ensure proper handling of service operations with error propagation
* refactor: Simplify service operation execution and enhance service status handling
* fix: Improve error message formatting in service operation execution and simplify service status retrieval
* refactor: Replace Cache with CacheProxy in multiple modules and update CacheEntry to be generic
* fix: Remove unnecessary success message from config validation
* refactor: Comment out logging statements in service version check and IPC request handling
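One item above introduces a retry strategy for IPC requests via the backoff dependency. The actual parameters and crate usage are outside this diff, so the following is only a minimal hand-rolled sketch of the idea (retry a fallible request with exponentially growing delays), not the backoff crate's real API:

use std::thread::sleep;
use std::time::Duration;

// Hand-rolled exponential backoff, purely illustrative: not the backoff
// crate's API and not the parameters the IPC client actually uses.
fn retry_with_backoff<T, E, F>(mut attempts: u32, mut delay: Duration, mut op: F) -> Result<T, E>
where
    F: FnMut() -> Result<T, E>,
{
    loop {
        match op() {
            Ok(value) => return Ok(value),
            Err(e) if attempts <= 1 => return Err(e),
            Err(_) => {
                sleep(delay);
                delay *= 2; // double the wait before the next attempt
                attempts -= 1;
            }
        }
    }
}

fn main() {
    let mut calls = 0;
    // Simulated IPC request that succeeds on the third try.
    let result = retry_with_backoff(5, Duration::from_millis(50), || {
        calls += 1;
        if calls < 3 { Err("connection refused") } else { Ok("pong") }
    });
    println!("result after {calls} calls: {result:?}");
}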
@@ -1,6 +1,6 @@
 use super::CmdResult;
 use crate::{
-    cache::Cache,
+    cache::CacheProxy,
     config::Config,
     core::{CoreManager, handle},
 };
@@ -317,8 +317,8 @@ pub async fn get_clash_version() -> CmdResult<serde_json::Value> {
 #[tauri::command]
 pub async fn get_clash_config() -> CmdResult<serde_json::Value> {
     let manager = IpcManager::global();
-    let cache = Cache::global();
-    let key = Cache::make_key("clash_config", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("clash_config", "default");
     let value = cache
         .get_or_fetch(key, CONFIG_REFRESH_INTERVAL, || async {
             manager.get_config().await.unwrap_or_else(|e| {
@@ -333,8 +333,8 @@ pub async fn get_clash_config() -> CmdResult<serde_json::Value> {
 /// Force-refresh the Clash config cache
 #[tauri::command]
 pub async fn force_refresh_clash_config() -> CmdResult<serde_json::Value> {
-    let cache = Cache::global();
-    let key = Cache::make_key("clash_config", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("clash_config", "default");
     cache.map.remove(&key);
     get_clash_config().await
 }
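The hunks above (and the proxy-command hunks that follow) replace Cache with CacheProxy while keeping the same keyed, TTL-based get_or_fetch pattern. A minimal synchronous sketch of that pattern, with hypothetical names standing in for the project's actual CacheProxy (which is async and stores serde_json::Value entries):

use std::collections::HashMap;
use std::sync::Mutex;
use std::time::{Duration, Instant};

// Illustrative, synchronous stand-in for CacheProxy: a keyed cache whose
// entries expire after a refresh interval. Field and method names mirror the
// calls in the diff but are otherwise hypothetical.
struct SimpleCache {
    map: Mutex<HashMap<String, (Instant, String)>>,
}

impl SimpleCache {
    fn new() -> Self {
        Self { map: Mutex::new(HashMap::new()) }
    }

    // Namespaced key, as in CacheProxy::make_key("clash_config", "default").
    fn make_key(scope: &str, id: &str) -> String {
        format!("{scope}:{id}")
    }

    // Return the cached value while it is fresh; otherwise run the fetcher
    // and store its result with a new timestamp.
    fn get_or_fetch<F>(&self, key: String, ttl: Duration, fetch: F) -> String
    where
        F: FnOnce() -> String,
    {
        let mut map = self.map.lock().unwrap();
        if let Some((stamp, value)) = map.get(&key) {
            if stamp.elapsed() < ttl {
                return value.clone();
            }
        }
        let value = fetch();
        map.insert(key, (Instant::now(), value.clone()));
        value
    }

    // A forced refresh just drops the entry, like cache.map.remove(&key) above.
    fn invalidate(&self, key: &str) {
        self.map.lock().unwrap().remove(key);
    }
}

fn main() {
    let cache = SimpleCache::new();
    let key = SimpleCache::make_key("clash_config", "default");
    let first = cache.get_or_fetch(key.clone(), Duration::from_secs(5), || "config-v1".into());
    println!("{first}");
    cache.invalidate(&key); // the next get_or_fetch will call the fetcher again
}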
@@ -2,7 +2,7 @@ use tauri::Emitter;

 use super::CmdResult;
 use crate::{
-    cache::Cache,
+    cache::CacheProxy,
     core::{handle::Handle, tray::Tray},
     ipc::IpcManager,
     logging,
@@ -15,8 +15,8 @@ const PROVIDERS_REFRESH_INTERVAL: Duration = Duration::from_secs(60);

 #[tauri::command]
 pub async fn get_proxies() -> CmdResult<serde_json::Value> {
-    let cache = Cache::global();
-    let key = Cache::make_key("proxies", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("proxies", "default");
     let value = cache
         .get_or_fetch(key, PROXIES_REFRESH_INTERVAL, || async {
             let manager = IpcManager::global();
@@ -32,16 +32,16 @@ pub async fn get_proxies() -> CmdResult<serde_json::Value> {
 /// Force-refresh the proxy cache, used when switching profiles
 #[tauri::command]
 pub async fn force_refresh_proxies() -> CmdResult<serde_json::Value> {
-    let cache = Cache::global();
-    let key = Cache::make_key("proxies", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("proxies", "default");
     cache.map.remove(&key);
     get_proxies().await
 }

 #[tauri::command]
 pub async fn get_providers_proxies() -> CmdResult<serde_json::Value> {
-    let cache = Cache::global();
-    let key = Cache::make_key("providers", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("providers", "default");
     let value = cache
         .get_or_fetch(key, PROVIDERS_REFRESH_INTERVAL, || async {
             let manager = IpcManager::global();
@@ -85,8 +85,8 @@ pub async fn update_proxy_and_sync(group: String, proxy: String) -> CmdResult<()
         proxy
     );

-    let cache = Cache::global();
-    let key = Cache::make_key("proxies", "default");
+    let cache = CacheProxy::global();
+    let key = CacheProxy::make_key("proxies", "default");
     cache.map.remove(&key);

     if let Err(e) = Tray::global().update_menu().await {
@@ -1,45 +1,47 @@
 use super::CmdResult;
 use crate::{
-    core::{CoreManager, service},
+    core::{
+        CoreManager,
+        service::{self, SERVICE_MANAGER, ServiceStatus},
+    },
     utils::i18n::t,
 };
 use anyhow::Result;

-async fn execute_service_operation_sync<F, Fut, E>(service_op: F, op_type: &str) -> CmdResult
-where
-    F: FnOnce() -> Fut,
-    Fut: std::future::Future<Output = Result<(), E>>,
-    E: ToString + std::fmt::Debug,
-{
-    if let Err(e) = service_op().await {
-        let emsg = format!("{} {} failed: {}", op_type, "Service", e.to_string());
+async fn execute_service_operation_sync(status: ServiceStatus, op_type: &str) -> CmdResult {
+    if let Err(e) = SERVICE_MANAGER
+        .lock()
+        .await
+        .handle_service_status(&status)
+        .await
+    {
+        let emsg = format!("{} Service failed: {}", op_type, e);
         return Err(t(emsg.as_str()).await);
     }
     if CoreManager::global().restart_core().await.is_err() {
-        let emsg = format!("{} {} failed", "Restart", "Core");
-        return Err(t(emsg.as_str()).await);
+        let emsg = "Restart Core failed";
+        return Err(t(emsg).await);
     }
     Ok(())
 }

 #[tauri::command]
 pub async fn install_service() -> CmdResult {
-    execute_service_operation_sync(service::install_service, "Install").await
+    execute_service_operation_sync(ServiceStatus::InstallRequired, "Install").await
 }

 #[tauri::command]
 pub async fn uninstall_service() -> CmdResult {
-    execute_service_operation_sync(service::uninstall_service, "Uninstall").await
+    execute_service_operation_sync(ServiceStatus::UninstallRequired, "Uninstall").await
 }

 #[tauri::command]
 pub async fn reinstall_service() -> CmdResult {
-    execute_service_operation_sync(service::reinstall_service, "Reinstall").await
+    execute_service_operation_sync(ServiceStatus::ReinstallRequired, "Reinstall").await
 }

 #[tauri::command]
 pub async fn repair_service() -> CmdResult {
-    execute_service_operation_sync(service::force_reinstall_service, "Repair").await
+    execute_service_operation_sync(ServiceStatus::ForceReinstallRequired, "Repair").await
 }

 #[tauri::command]
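With this change the service commands no longer pass a function pointer; each hands a ServiceStatus variant to the shared SERVICE_MANAGER, which decides what to run. Below is a simplified, synchronous sketch of that status-driven dispatch. The variant names match the ones used above, but the manager, its methods, the error type, and the mapping of the reinstall variants to uninstall-then-install are stand-ins and guesses, not the project's real async service module:

// Simplified sketch of the dispatch behind execute_service_operation_sync.
#[derive(Debug)]
enum ServiceStatus {
    InstallRequired,
    UninstallRequired,
    ReinstallRequired,
    ForceReinstallRequired,
}

struct ServiceManager;

impl ServiceManager {
    // One entry point maps the requested state to concrete operations, so the
    // Tauri commands only have to name the state they want.
    fn handle_service_status(&self, status: &ServiceStatus) -> Result<(), String> {
        match status {
            ServiceStatus::InstallRequired => self.install(),
            ServiceStatus::UninstallRequired => self.uninstall(),
            // Guess: treat both reinstall variants as uninstall-then-install.
            ServiceStatus::ReinstallRequired | ServiceStatus::ForceReinstallRequired => {
                self.uninstall().and_then(|_| self.install())
            }
        }
    }

    fn install(&self) -> Result<(), String> {
        println!("installing service");
        Ok(())
    }

    fn uninstall(&self) -> Result<(), String> {
        println!("uninstalling service");
        Ok(())
    }
}

fn main() {
    let manager = ServiceManager;
    for status in [ServiceStatus::InstallRequired, ServiceStatus::ReinstallRequired] {
        if let Err(e) = manager.handle_service_status(&status) {
            eprintln!("{status:?} Service failed: {e}");
        }
    }
}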