mirror of https://github.com/clash-verge-rev/clash-verge-rev.git
synced 2026-01-29 17:15:38 +08:00
style: adjust rustfmt max_width to 120
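This is a formatting-only change: rustfmt.toml raises max_width from 100 to 120 (see the rustfmt.toml hunk below), and the remaining hunks are the result of re-running rustfmt under the new limit, which mostly collapses expressions that previously wrapped at 100 columns into single lines. A minimal before/after sketch of the effect, taken from the get_app_dir hunk further down:

// before (max_width = 100): the call chain is wrapped
let app_home_dir = dirs::app_home_dir()
    .stringify_err()?
    .to_string_lossy()
    .into();

// after (max_width = 120): the same chain fits on one line
let app_home_dir = dirs::app_home_dir().stringify_err()?.to_string_lossy().into();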
@@ -60,7 +60,11 @@ unsafe extern "system" fn shutdown_proc(
if let Some(handler) = SHUTDOWN_HANDLER.get() {
if let Some(Some(rt)) = RUNTIME.get() {
rt.block_on(async {
logging!(info, Type::SystemSignal, "Session ended, system shutting down.");
logging!(
info,
Type::SystemSignal,
"Session ended, system shutting down."
);
handler().await;
logging!(info, Type::SystemSignal, "resolved reset finished");
});
@@ -154,7 +158,11 @@ where
std::ptr::null_mut(),
);
if hwnd.is_null() {
logging!(error, Type::SystemSignal, "failed to create shutdown window");
logging!(
error,
Type::SystemSignal,
"failed to create shutdown window"
);
} else {
app_handle.manage(ShutdownState { hwnd });
}

@@ -1,4 +1,4 @@
max_width = 100
max_width = 120
hard_tabs = false
tab_spaces = 4
newline_style = "Auto"

@@ -89,10 +89,7 @@ pub fn get_portable_flag() -> bool {
/// 获取应用目录
#[tauri::command]
pub fn get_app_dir() -> CmdResult<String> {
let app_home_dir = dirs::app_home_dir()
.stringify_err()?
.to_string_lossy()
.into();
let app_home_dir = dirs::app_home_dir().stringify_err()?.to_string_lossy().into();
Ok(app_home_dir)
}

@@ -105,10 +102,7 @@ pub fn get_auto_launch_status() -> CmdResult<bool> {
/// 下载图标缓存
#[tauri::command]
pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String> {
let icon_cache_dir = dirs::app_home_dir()
.stringify_err()?
.join("icons")
.join("cache");
let icon_cache_dir = dirs::app_home_dir().stringify_err()?.join("icons").join("cache");
let icon_path = icon_cache_dir.join(name.as_str());

if icon_path.exists() {
@@ -134,9 +128,7 @@ pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String>
let content = response.bytes().await.stringify_err()?;

let is_html = content.len() > 15
&& (content.starts_with(b"<!DOCTYPE html")
|| content.starts_with(b"<html")
|| content.starts_with(b"<?xml"));
&& (content.starts_with(b"<!DOCTYPE html") || content.starts_with(b"<html") || content.starts_with(b"<?xml"));

if is_image && !is_html {
{

@@ -30,7 +30,5 @@ pub async fn restore_local_backup(filename: String) -> CmdResult<()> {
/// Export local backup to a user selected destination
#[tauri::command]
pub async fn export_local_backup(filename: String, destination: String) -> CmdResult<()> {
feat::export_local_backup(filename, destination)
.await
.stringify_err()
feat::export_local_backup(filename, destination).await.stringify_err()
}

@@ -46,26 +46,18 @@ pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>>

match CoreManager::global().change_core(&clash_core).await {
Ok(_) => {
logging_error!(
Type::Core,
Config::profiles().await.latest_arc().save_file().await
);
logging_error!(Type::Core, Config::profiles().await.latest_arc().save_file().await);

// 切换内核后重启内核
match CoreManager::global().restart_core().await {
Ok(_) => {
logging!(
info,
Type::Core,
"core changed and restarted to {clash_core}"
);
logging!(info, Type::Core, "core changed and restarted to {clash_core}");
handle::Handle::notice_message("config_core::change_success", clash_core);
handle::Handle::refresh_clash();
Ok(None)
}
Err(err) => {
let error_msg: String =
format!("Core changed but failed to restart: {err}").into();
let error_msg: String = format!("Core changed but failed to restart: {err}").into();
handle::Handle::notice_message("config_core::change_error", error_msg.clone());
logging!(error, Type::Core, "{error_msg}");
Ok(Some(error_msg))
@@ -94,10 +86,7 @@ pub async fn start_core() -> CmdResult {
/// 关闭核心
#[tauri::command]
pub async fn stop_core() -> CmdResult {
logging_error!(
Type::Core,
Config::profiles().await.latest_arc().save_file().await
);
logging_error!(Type::Core, Config::profiles().await.latest_arc().save_file().await);
let result = CoreManager::global().stop_core().await.stringify_err();
if result.is_ok() {
handle::Handle::refresh_clash();
@@ -108,10 +97,7 @@ pub async fn stop_core() -> CmdResult {
/// 重启核心
#[tauri::command]
pub async fn restart_core() -> CmdResult {
logging_error!(
Type::Core,
Config::profiles().await.latest_arc().save_file().await
);
logging_error!(Type::Core, Config::profiles().await.latest_arc().save_file().await);
let result = CoreManager::global().restart_core().await.stringify_err();
if result.is_ok() {
handle::Handle::refresh_clash();
@@ -140,9 +126,7 @@ pub async fn save_dns_config(dns_config: Mapping) -> CmdResult {
use tokio::fs;

// 获取DNS配置文件路径
let dns_path = dirs::app_home_dir()
.stringify_err()?
.join(constants::files::DNS_CONFIG);
let dns_path = dirs::app_home_dir().stringify_err()?.join(constants::files::DNS_CONFIG);

// 保存DNS配置到文件
let yaml_str = serde_yaml_ng::to_string(&dns_config).stringify_err()?;
@@ -157,9 +141,7 @@ pub async fn save_dns_config(dns_config: Mapping) -> CmdResult {
pub async fn apply_dns_config(apply: bool) -> CmdResult {
if apply {
// 读取DNS配置文件
let dns_path = dirs::app_home_dir()
.stringify_err()?
.join(constants::files::DNS_CONFIG);
let dns_path = dirs::app_home_dir().stringify_err()?.join(constants::files::DNS_CONFIG);

if !dns_path.exists() {
logging!(warn, Type::Config, "DNS config file not found");
@@ -171,10 +153,9 @@ pub async fn apply_dns_config(apply: bool) -> CmdResult {
})?;

// 解析DNS配置
let patch_config = serde_yaml_ng::from_str::<serde_yaml_ng::Mapping>(&dns_yaml)
.stringify_err_log(|e| {
logging!(error, Type::Config, "Failed to parse DNS config: {e}");
})?;
let patch_config = serde_yaml_ng::from_str::<serde_yaml_ng::Mapping>(&dns_yaml).stringify_err_log(|e| {
logging!(error, Type::Config, "Failed to parse DNS config: {e}");
})?;

logging!(info, Type::Config, "Applying DNS config from file");

@@ -194,35 +175,25 @@ pub async fn apply_dns_config(apply: bool) -> CmdResult {
})?;

// 应用新配置
CoreManager::global()
.update_config()
.await
.stringify_err_log(|err| {
let err = format!("Failed to apply config with DNS: {err}");
logging!(error, Type::Config, "{err}");
})?;
CoreManager::global().update_config().await.stringify_err_log(|err| {
let err = format!("Failed to apply config with DNS: {err}");
logging!(error, Type::Config, "{err}");
})?;

logging!(info, Type::Config, "DNS config successfully applied");
} else {
// 当关闭DNS设置时,重新生成配置(不加载DNS配置文件)
logging!(
info,
Type::Config,
"DNS settings disabled, regenerating config"
);
logging!(info, Type::Config, "DNS settings disabled, regenerating config");

Config::generate().await.stringify_err_log(|err| {
let err = format!("Failed to regenerate config: {err}");
logging!(error, Type::Config, "{err}");
})?;

CoreManager::global()
.update_config()
.await
.stringify_err_log(|err| {
let err = format!("Failed to apply regenerated config: {err}");
logging!(error, Type::Config, "{err}");
})?;
CoreManager::global().update_config().await.stringify_err_log(|err| {
let err = format!("Failed to apply regenerated config: {err}");
logging!(error, Type::Config, "{err}");
})?;

logging!(info, Type::Config, "Config regenerated successfully");
}
@@ -236,9 +207,7 @@ pub async fn apply_dns_config(apply: bool) -> CmdResult {
pub fn check_dns_config_exists() -> CmdResult<bool> {
use crate::utils::dirs;

let dns_path = dirs::app_home_dir()
.stringify_err()?
.join(constants::files::DNS_CONFIG);
let dns_path = dirs::app_home_dir().stringify_err()?.join(constants::files::DNS_CONFIG);

Ok(dns_path.exists())
}
@@ -249,9 +218,7 @@ pub async fn get_dns_config_content() -> CmdResult<String> {
use crate::utils::dirs;
use tokio::fs;

let dns_path = dirs::app_home_dir()
.stringify_err()?
.join(constants::files::DNS_CONFIG);
let dns_path = dirs::app_home_dir().stringify_err()?.join(constants::files::DNS_CONFIG);

if !fs::try_exists(&dns_path).await.stringify_err()? {
return Err("DNS config file not found".into());
@@ -279,9 +246,6 @@ pub async fn validate_dns_config() -> CmdResult<(bool, String)> {

#[tauri::command]
pub async fn get_clash_logs() -> CmdResult<Vec<CompactString>> {
let logs = CoreManager::global()
.get_clash_logs()
.await
.unwrap_or_default();
let logs = CoreManager::global().get_clash_logs().await.unwrap_or_default();
Ok(logs)
}

@@ -12,9 +12,12 @@ pub(super) async fn check_bahamut_anime(client: &Client) -> UnlockItem {

let client_with_cookies = match Client::builder()
.use_rustls_tls()
.user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
.user_agent(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
)
.cookie_provider(Arc::clone(&cookie_store))
.build() {
.build()
{
Ok(client) => client,
Err(e) => {
logging!(
@@ -59,8 +62,7 @@ pub(super) async fn check_bahamut_anime(client: &Client) -> UnlockItem {
};
}

let url =
format!("https://ani.gamer.com.tw/ajax/token.php?adID=89422&sn=37783&device={device_id}");
let url = format!("https://ani.gamer.com.tw/ajax/token.php?adID=89422&sn=37783&device={device_id}");

let token_result = match client_with_cookies.get(&url).send().await {
Ok(response) => match response.text().await {
@@ -85,21 +87,14 @@ pub(super) async fn check_bahamut_anime(client: &Client) -> UnlockItem {
};
}

let region = match client_with_cookies
.get("https://ani.gamer.com.tw/")
.send()
.await
{
let region = match client_with_cookies.get("https://ani.gamer.com.tw/").send().await {
Ok(response) => match response.text().await {
Ok(body) => match Regex::new(r#"data-geo="([^"]+)"#) {
Ok(region_re) => region_re
.captures(&body)
.and_then(|caps| caps.get(1))
.map(|m| {
let country_code = m.as_str();
let emoji = country_code_to_emoji(country_code);
format!("{emoji}{country_code}")
}),
Ok(region_re) => region_re.captures(&body).and_then(|caps| caps.get(1)).map(|m| {
let country_code = m.as_str();
let emoji = country_code_to_emoji(country_code);
format!("{emoji}{country_code}")
}),
Err(e) => {
logging!(
error,

@@ -9,8 +9,7 @@ use super::utils::{country_code_to_emoji, get_local_date_string};
#[allow(clippy::cognitive_complexity)]
pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
let device_api_url = "https://disney.api.edge.bamgrid.com/devices";
let auth_header =
"Bearer ZGlzbmV5JmJyb3dzZXImMS4wLjA.Cu56AgSfBTDag5NiRA81oLHkDZfu5L3CKadnefEAY84";
let auth_header = "Bearer ZGlzbmV5JmJyb3dzZXImMS4wLjA.Cu56AgSfBTDag5NiRA81oLHkDZfu5L3CKadnefEAY84";

let device_req_body = serde_json::json!({
"deviceFamily": "browser",
@@ -39,12 +38,7 @@ pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
let device_response = match device_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ device response: {}",
e
);
logging!(error, Type::Network, "Failed to get Disney+ device response: {}", e);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
@@ -120,18 +114,12 @@ pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
}
};
let token_body = [
(
"grant_type",
"urn:ietf:params:oauth:grant-type:token-exchange",
),
("grant_type", "urn:ietf:params:oauth:grant-type:token-exchange"),
("latitude", "0"),
("longitude", "0"),
("platform", "browser"),
("subject_token", assertion_str.as_str()),
(
"subject_token_type",
"urn:bamtech:params:oauth:token-type:device",
),
("subject_token_type", "urn:bamtech:params:oauth:token-type:device"),
];

let token_result = client
@@ -154,12 +142,7 @@ pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
let token_response = match token_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ token response: {}",
e
);
logging!(error, Type::Network, "Failed to get Disney+ token response: {}", e);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
@@ -264,12 +247,7 @@ pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
let graphql_response = match graphql_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ GraphQL response: {}",
e
);
logging!(error, Type::Network, "Failed to get Disney+ GraphQL response: {}", e);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),

@@ -18,12 +18,7 @@ pub(super) async fn check_gemini(client: &Client) -> UnlockItem {
let re = match Regex::new(r#",2,1,200,"([A-Z]{3})""#) {
Ok(re) => re,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Gemini regex: {}",
e
);
logging!(error, Type::Network, "Failed to compile Gemini regex: {}", e);
return UnlockItem {
name: "Gemini".to_string(),
status: "Failed".to_string(),

@@ -50,12 +50,7 @@ pub(super) async fn check_netflix(client: &Client) -> UnlockItem {
let status1 = match result1 {
Ok(response) => response.status().as_u16(),
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Netflix response 1: {}",
e
);
logging!(error, Type::Network, "Failed to get Netflix response 1: {}", e);
return UnlockItem {
name: "Netflix".to_string(),
status: "Failed".to_string(),
@@ -68,12 +63,7 @@ pub(super) async fn check_netflix(client: &Client) -> UnlockItem {
let status2 = match result2 {
Ok(response) => response.status().as_u16(),
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Netflix response 2: {}",
e
);
logging!(error, Type::Network, "Failed to get Netflix response 2: {}", e);
return UnlockItem {
name: "Netflix".to_string(),
status: "Failed".to_string(),
@@ -157,12 +147,7 @@ pub(super) async fn check_netflix(client: &Client) -> UnlockItem {
async fn check_netflix_cdn(client: &Client) -> UnlockItem {
let url = "https://api.fast.com/netflix/speedtest/v2?https=true&token=YXNkZmFzZGxmbnNkYWZoYXNkZmhrYWxm&urlCount=5";

match client
.get(url)
.timeout(std::time::Duration::from_secs(30))
.send()
.await
{
match client.get(url).timeout(std::time::Duration::from_secs(30)).send().await {
Ok(response) => {
if response.status().as_u16() == 403 {
return UnlockItem {

@@ -23,12 +23,7 @@ pub(super) async fn check_prime_video(client: &Client) -> UnlockItem {
let response = match result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Prime Video response: {}",
e
);
logging!(error, Type::Network, "Failed to get Prime Video response: {}", e);
return UnlockItem {
name: "Prime Video".to_string(),
status: "Failed (Network Connection)".to_string(),

@@ -31,12 +31,7 @@ pub(super) async fn check_youtube_premium(client: &Client) -> UnlockItem {
}
}
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile YouTube Premium regex: {}",
e
);
logging!(error, Type::Network, "Failed to compile YouTube Premium regex: {}", e);
}
}
}

@@ -40,10 +40,7 @@ pub async fn get_sys_proxy() -> CmdResult<Mapping> {
#[tauri::command]
pub async fn get_auto_proxy() -> CmdResult<Mapping> {
let auto_proxy = Autoproxy::get_auto_proxy().stringify_err()?;
let Autoproxy {
ref enable,
ref url,
} = auto_proxy;
let Autoproxy { ref enable, ref url } = auto_proxy;

let mut map = Mapping::new();
map.insert("enable".into(), (*enable).into());

@@ -4,8 +4,8 @@ use crate::{
config::{
Config, IProfiles, PrfItem, PrfOption,
profiles::{
profiles_append_item_with_filedata_safe, profiles_delete_item_safe,
profiles_patch_item_safe, profiles_reorder_safe, profiles_save_file_safe,
profiles_append_item_with_filedata_safe, profiles_delete_item_safe, profiles_patch_item_safe,
profiles_reorder_safe, profiles_save_file_safe,
},
profiles_append_item_safe,
},
@@ -223,32 +223,20 @@ async fn validate_new_profile(new_profile: &String) -> Result<(), ()> {
// 如果获取到文件路径,检查YAML语法
if let Some(file_path) = config_file_result {
if !file_path.exists() {
logging!(
error,
Type::Cmd,
"目标配置文件不存在: {}",
file_path.display()
);
handle::Handle::notice_message(
"config_validate::file_not_found",
format!("{}", file_path.display()),
);
logging!(error, Type::Cmd, "目标配置文件不存在: {}", file_path.display());
handle::Handle::notice_message("config_validate::file_not_found", format!("{}", file_path.display()));
return Err(());
}

// 超时保护
let file_read_result = tokio::time::timeout(
Duration::from_secs(5),
tokio::fs::read_to_string(&file_path),
)
.await;
let file_read_result =
tokio::time::timeout(Duration::from_secs(5), tokio::fs::read_to_string(&file_path)).await;

match file_read_result {
Ok(Ok(content)) => {
let yaml_parse_result = AsyncHandler::spawn_blocking(move || {
serde_yaml_ng::from_str::<serde_yaml_ng::Value>(&content)
})
.await;
let yaml_parse_result =
AsyncHandler::spawn_blocking(move || serde_yaml_ng::from_str::<serde_yaml_ng::Value>(&content))
.await;

match yaml_parse_result {
Ok(Ok(_)) => {
@@ -257,25 +245,14 @@ async fn validate_new_profile(new_profile: &String) -> Result<(), ()> {
}
Ok(Err(err)) => {
let error_msg = format!(" {err}");
logging!(
error,
Type::Cmd,
"目标配置文件存在YAML语法错误:{}",
error_msg
);
handle::Handle::notice_message(
"config_validate::yaml_syntax_error",
error_msg,
);
logging!(error, Type::Cmd, "目标配置文件存在YAML语法错误:{}", error_msg);
handle::Handle::notice_message("config_validate::yaml_syntax_error", error_msg);
Err(())
}
Err(join_err) => {
let error_msg = format!("YAML解析任务失败: {join_err}");
logging!(error, Type::Cmd, "{}", error_msg);
handle::Handle::notice_message(
"config_validate::yaml_parse_error",
error_msg,
);
handle::Handle::notice_message("config_validate::yaml_parse_error", error_msg);
Err(())
}
}
@@ -342,10 +319,7 @@ async fn handle_success(current_value: Option<&String>) -> CmdResult<bool> {
Ok(true)
}

async fn handle_validation_failure(
error_msg: String,
current_profile: Option<&String>,
) -> CmdResult<bool> {
async fn handle_validation_failure(error_msg: String, current_profile: Option<&String>) -> CmdResult<bool> {
logging!(warn, Type::Cmd, "配置验证失败: {}", error_msg);
Config::profiles().await.discard();
if let Some(prev_profile) = current_profile {
@@ -373,18 +347,11 @@ async fn handle_timeout(current_profile: Option<&String>) -> CmdResult<bool> {
Ok(false)
}

async fn perform_config_update(
current_value: Option<&String>,
current_profile: Option<&String>,
) -> CmdResult<bool> {
async fn perform_config_update(current_value: Option<&String>, current_profile: Option<&String>) -> CmdResult<bool> {
defer! {
CURRENT_SWITCHING_PROFILE.store(false, Ordering::Release);
}
let update_result = tokio::time::timeout(
Duration::from_secs(30),
CoreManager::global().update_config(),
)
.await;
let update_result = tokio::time::timeout(Duration::from_secs(30), CoreManager::global().update_config()).await;

match update_result {
Ok(Ok((true, _))) => handle_success(current_value).await,
@@ -407,12 +374,7 @@ pub async fn patch_profiles_config(profiles: IProfiles) -> CmdResult<bool> {

let target_profile = profiles.current.as_ref();

logging!(
info,
Type::Cmd,
"开始修改配置文件,目标profile: {:?}",
target_profile
);
logging!(info, Type::Cmd, "开始修改配置文件,目标profile: {:?}", target_profile);

// 保存当前配置,以便在验证失败时恢复
let previous_profile = Config::profiles().await.data_arc().current.clone();
@@ -426,9 +388,7 @@ pub async fn patch_profiles_config(profiles: IProfiles) -> CmdResult<bool> {
CURRENT_SWITCHING_PROFILE.store(false, Ordering::Release);
return Ok(false);
}
Config::profiles()
.await
.edit_draft(|d| d.patch_config(&profiles));
Config::profiles().await.edit_draft(|d| d.patch_config(&profiles));

perform_config_update(target_profile, previous_profile.as_ref()).await
}
@@ -455,19 +415,14 @@ pub async fn patch_profile(index: String, profile: PrfItem) -> CmdResult {
{
let old_interval = old_profile.option.as_ref().and_then(|o| o.update_interval);
let new_interval = new_option.update_interval;
let old_allow_auto_update = old_profile
.option
.as_ref()
.and_then(|o| o.allow_auto_update);
let old_allow_auto_update = old_profile.option.as_ref().and_then(|o| o.allow_auto_update);
let new_allow_auto_update = new_option.allow_auto_update;
(old_interval != new_interval) || (old_allow_auto_update != new_allow_auto_update)
} else {
false
};

profiles_patch_item_safe(&index, &profile)
.await
.stringify_err()?;
profiles_patch_item_safe(&index, &profile).await.stringify_err()?;

// 如果更新间隔或允许自动更新变更,异步刷新定时器
if should_refresh_timer {
@@ -498,9 +453,7 @@ pub async fn view_profile(index: String) -> CmdResult {
.as_ref()
.ok_or("the file field is null")?;

let path = dirs::app_profiles_dir()
.stringify_err()?
.join(file.as_str());
let path = dirs::app_profiles_dir().stringify_err()?.join(file.as_str());
if !path.exists() {
ret_err!("the file not found");
}
@@ -515,11 +468,7 @@ pub async fn read_profile_file(index: String) -> CmdResult<String> {
let profiles = Config::profiles().await;
let profiles_ref = profiles.latest_arc();
PrfItem {
file: profiles_ref
.get_item(&index)
.stringify_err()?
.file
.to_owned(),
file: profiles_ref.get_item(&index).stringify_err()?.file.to_owned(),
..Default::default()
}
};

@@ -58,8 +58,7 @@ pub async fn get_runtime_proxy_chain_config(proxy_chain_exit_node: String) -> Cm

while let Some(proxy) = proxies.iter().find(|proxy| {
if let serde_yaml_ng::Value::Mapping(proxy_map) = proxy {
proxy_map.get("name").map(|x| x.as_str()) == proxy_name
&& proxy_map.get("dialer-proxy").is_some()
proxy_map.get("name").map(|x| x.as_str()) == proxy_name && proxy_map.get("dialer-proxy").is_some()
} else {
false
}
@@ -94,9 +93,7 @@ pub async fn get_runtime_proxy_chain_config(proxy_chain_exit_node: String) -> Cm

/// 更新运行时链式代理配置
#[tauri::command]
pub async fn update_proxy_chain_config_in_runtime(
proxy_chain_config: Option<serde_yaml_ng::Value>,
) -> CmdResult<()> {
pub async fn update_proxy_chain_config_in_runtime(proxy_chain_config: Option<serde_yaml_ng::Value>) -> CmdResult<()> {
{
let runtime = Config::runtime().await;
runtime.edit_draft(|d| d.update_proxy_chain_config(proxy_chain_config));

@@ -71,10 +71,7 @@ pub async fn save_profile_file(index: String, file_data: Option<String>) -> CmdR
Ok(())
}

async fn restore_original(
file_path: &std::path::Path,
original_content: &str,
) -> Result<(), String> {
async fn restore_original(file_path: &std::path::Path, original_content: &str) -> Result<(), String> {
fs::write(file_path, original_content).await.stringify_err()
}

@@ -90,34 +87,20 @@ async fn handle_merge_file(
file_path: &std::path::Path,
original_content: &str,
) -> CmdResult<bool> {
logging!(
info,
Type::Config,
"[cmd配置save] 检测到merge文件,只进行语法验证"
);
logging!(info, Type::Config, "[cmd配置save] 检测到merge文件,只进行语法验证");

match CoreConfigValidator::validate_config_file(file_path_str, Some(true)).await {
Ok((true, _)) => {
logging!(info, Type::Config, "[cmd配置save] merge文件语法验证通过");
if let Err(e) = CoreManager::global().update_config().await {
logging!(
warn,
Type::Config,
"[cmd配置save] 更新整体配置时发生错误: {}",
e
);
logging!(warn, Type::Config, "[cmd配置save] 更新整体配置时发生错误: {}", e);
} else {
handle::Handle::refresh_clash();
}
Ok(true)
}
Ok((false, error_msg)) => {
logging!(
warn,
Type::Config,
"[cmd配置save] merge文件语法验证失败: {}",
error_msg
);
logging!(warn, Type::Config, "[cmd配置save] merge文件语法验证失败: {}", error_msg);
restore_original(file_path, original_content).await?;
let result = (false, error_msg.clone());
crate::cmd::validate::handle_yaml_validation_notice(&result, "合并配置文件");
@@ -149,27 +132,15 @@ async fn handle_full_validation(
|| error_msg.contains("Failed to read file:")
|| (!file_path_str.ends_with(".js") && !is_script_error(&error_msg, file_path_str))
{
logging!(
info,
Type::Config,
"[cmd配置save] YAML配置文件验证失败,发送通知"
);
logging!(info, Type::Config, "[cmd配置save] YAML配置文件验证失败,发送通知");
let result = (false, error_msg.to_owned());
crate::cmd::validate::handle_yaml_validation_notice(&result, "YAML配置文件");
} else if is_script_error(&error_msg, file_path_str) {
logging!(
info,
Type::Config,
"[cmd配置save] 脚本文件验证失败,发送通知"
);
logging!(info, Type::Config, "[cmd配置save] 脚本文件验证失败,发送通知");
let result = (false, error_msg.to_owned());
crate::cmd::validate::handle_script_validation_notice(&result, "脚本文件");
} else {
logging!(
info,
Type::Config,
"[cmd配置save] 其他类型验证失败,发送一般通知"
);
logging!(info, Type::Config, "[cmd配置save] 其他类型验证失败,发送一般通知");
handle::Handle::notice_message("config_validate::error", error_msg.to_owned());
}

@@ -3,12 +3,7 @@ use crate::core::service::{self, SERVICE_MANAGER, ServiceStatus};
use smartstring::SmartString;

async fn execute_service_operation_sync(status: ServiceStatus, op_type: &str) -> CmdResult {
if let Err(e) = SERVICE_MANAGER
.lock()
.await
.handle_service_status(&status)
.await
{
if let Err(e) = SERVICE_MANAGER.lock().await.handle_service_status(&status).await {
let emsg = format!("{} Service failed: {}", op_type, e);
return Err(SmartString::from(emsg));
}

@@ -47,12 +47,7 @@ pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
}
Err(e) => {
let error_msg = e.to_string();
logging!(
error,
Type::Config,
"验证脚本文件过程发生错误: {}",
error_msg
);
logging!(error, Type::Config, "验证脚本文件过程发生错误: {}", error_msg);
handle::Handle::notice_message("config_validate::process_terminated", &error_msg);
Ok(false)
}
@@ -64,13 +59,7 @@ pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
pub fn handle_yaml_validation_notice(result: &(bool, String), file_type: &str) {
if !result.0 {
let error_msg = &result.1;
logging!(
info,
Type::Config,
"[通知] 处理{}验证错误: {}",
file_type,
error_msg
);
logging!(info, Type::Config, "[通知] 处理{}验证错误: {}", file_type, error_msg);

// 检查是否为merge文件
let is_merge_file = file_type.contains("合并");

@@ -28,9 +28,7 @@ pub async fn save_webdav_config(url: String, username: String, password: String)
/// 创建 WebDAV 备份并上传
#[tauri::command]
pub async fn create_webdav_backup() -> CmdResult<()> {
feat::create_backup_and_upload_webdav()
.await
.stringify_err()
feat::create_backup_and_upload_webdav().await.stringify_err()
}

/// 列出 WebDAV 上的备份文件

@@ -133,15 +133,9 @@ impl IClashTemp {
config.insert("external-controller".into(), ctrl.into());

#[cfg(unix)]
config.insert(
"external-controller-unix".into(),
external_controller_unix.into(),
);
config.insert("external-controller-unix".into(), external_controller_unix.into());
#[cfg(windows)]
config.insert(
"external-controller-pipe".into(),
external_controller_pipe.into(),
);
config.insert("external-controller-pipe".into(), external_controller_pipe.into());
config
}

@@ -152,12 +146,7 @@ impl IClashTemp {
}

pub async fn save_config(&self) -> Result<()> {
help::save_yaml(
&dirs::clash_path()?,
&self.0,
Some("# Generated by Clash Verge"),
)
.await
help::save_yaml(&dirs::clash_path()?, &self.0, Some("# Generated by Clash Verge")).await
}

pub fn get_mixed_port(&self) -> u16 {
@@ -282,9 +271,7 @@ impl IClashTemp {
false => val_str.to_owned(),
};

SocketAddr::from_str(val.as_str())
.ok()
.map(|s| s.to_string())
SocketAddr::from_str(val.as_str()).ok().map(|s| s.to_string())
}
None => None,
})
@@ -378,40 +365,19 @@ fn test_clash_info() {

assert_eq!(get_case(65537, ""), get_result(1, "127.0.0.1:9097"));

assert_eq!(
get_case(8888, "127.0.0.1:8888"),
get_result(8888, "127.0.0.1:8888")
);
assert_eq!(get_case(8888, "127.0.0.1:8888"), get_result(8888, "127.0.0.1:8888"));

assert_eq!(
get_case(8888, " :98888 "),
get_result(8888, "127.0.0.1:9097")
);
assert_eq!(get_case(8888, " :98888 "), get_result(8888, "127.0.0.1:9097"));

assert_eq!(
get_case(8888, "0.0.0.0:8080 "),
get_result(8888, "127.0.0.1:8080")
);
assert_eq!(get_case(8888, "0.0.0.0:8080 "), get_result(8888, "127.0.0.1:8080"));

assert_eq!(
get_case(8888, "0.0.0.0:8080"),
get_result(8888, "127.0.0.1:8080")
);
assert_eq!(get_case(8888, "0.0.0.0:8080"), get_result(8888, "127.0.0.1:8080"));

assert_eq!(
get_case(8888, "[::]:8080"),
get_result(8888, "127.0.0.1:8080")
);
assert_eq!(get_case(8888, "[::]:8080"), get_result(8888, "127.0.0.1:8080"));

assert_eq!(
get_case(8888, "192.168.1.1:8080"),
get_result(8888, "192.168.1.1:8080")
);
assert_eq!(get_case(8888, "192.168.1.1:8080"), get_result(8888, "192.168.1.1:8080"));

assert_eq!(
get_case(8888, "192.168.1.1:80800"),
get_result(8888, "127.0.0.1:9097")
);
assert_eq!(get_case(8888, "192.168.1.1:80800"), get_result(8888, "127.0.0.1:9097"));
}

#[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]

@@ -252,8 +252,7 @@ mod tests {
#[allow(unused_variables)]
#[allow(clippy::expect_used)]
fn test_prfitem_from_merge_size() {
let merge_item =
PrfItem::from_merge(Some("Merge".into())).expect("Failed to create merge item in test");
let merge_item = PrfItem::from_merge(Some("Merge".into())).expect("Failed to create merge item in test");
let prfitem_size = mem::size_of_val(&merge_item);
// Boxed version
let boxed_merge_item = Box::new(merge_item);
@@ -276,9 +275,6 @@ mod tests {
fn test_draft_size_boxed() {
let draft = Draft::new(Box::new(IRuntime::new()));
let box_iruntime_size = std::mem::size_of_val(&draft);
assert_eq!(
box_iruntime_size,
std::mem::size_of::<Draft<Box<IRuntime>>>()
);
assert_eq!(box_iruntime_size, std::mem::size_of::<Draft<Box<IRuntime>>>());
}
}

@@ -86,8 +86,7 @@ where

match encrypted_opt {
Some(encrypted) if !encrypted.is_empty() => {
let decrypted_string =
decrypt_data(&encrypted).map_err(serde::de::Error::custom)?;
let decrypted_string = decrypt_data(&encrypted).map_err(serde::de::Error::custom)?;
serde_json::from_str(&decrypted_string).map_err(serde::de::Error::custom)
}
_ => Ok(T::default()),

@@ -129,9 +129,8 @@ impl PrfOption {
result.user_agent = b_ref.user_agent.clone().or(result.user_agent);
result.with_proxy = b_ref.with_proxy.or(result.with_proxy);
result.self_proxy = b_ref.self_proxy.or(result.self_proxy);
result.danger_accept_invalid_certs = b_ref
.danger_accept_invalid_certs
.or(result.danger_accept_invalid_certs);
result.danger_accept_invalid_certs =
b_ref.danger_accept_invalid_certs.or(result.danger_accept_invalid_certs);
result.allow_auto_update = b_ref.allow_auto_update.or(result.allow_auto_update);
result.update_interval = b_ref.update_interval.or(result.update_interval);
result.merge = b_ref.merge.clone().or(result.merge);
@@ -259,8 +258,7 @@ impl PrfItem {
) -> Result<Self> {
let with_proxy = option.is_some_and(|o| o.with_proxy.unwrap_or(false));
let self_proxy = option.is_some_and(|o| o.self_proxy.unwrap_or(false));
let accept_invalid_certs =
option.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
let accept_invalid_certs = option.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
let allow_auto_update = option.map(|o| o.allow_auto_update.unwrap_or(true));
let user_agent = option.and_then(|o| o.user_agent.clone());
let update_interval = option.and_then(|o| o.update_interval);
@@ -282,13 +280,7 @@ impl PrfItem {

// 使用网络管理器发送请求
let resp = match NetworkManager::new()
.get_with_interrupt(
url,
proxy_type,
Some(timeout),
user_agent.clone(),
accept_invalid_certs,
)
.get_with_interrupt(url, proxy_type, Some(timeout), user_agent.clone(), accept_invalid_certs)
.await
{
Ok(r) => r,
@@ -348,10 +340,7 @@ impl PrfItem {
},
}
}
None => Some(
crate::utils::help::get_last_part_and_decode(url)
.unwrap_or_else(|| "Remote File".into()),
),
None => Some(crate::utils::help::get_last_part_and_decode(url).unwrap_or_else(|| "Remote File".into())),
};
let update_interval = match update_interval {
Some(val) => Some(val),
@@ -374,19 +363,16 @@ impl PrfItem {

let uid = help::get_uid("R").into();
let file = format!("{uid}.yaml").into();
let name = name.map(|s| s.to_owned()).unwrap_or_else(|| {
filename
.map(|s| s.into())
.unwrap_or_else(|| "Remote File".into())
});
let name = name
.map(|s| s.to_owned())
.unwrap_or_else(|| filename.map(|s| s.into()).unwrap_or_else(|| "Remote File".into()));
let data = resp.text_with_charset()?;

// process the charset "UTF-8 with BOM"
let data = data.trim_start_matches('\u{feff}');

// check the data whether the valid yaml format
let yaml = serde_yaml_ng::from_str::<Mapping>(data)
.context("the remote profile data is invalid yaml")?;
let yaml = serde_yaml_ng::from_str::<Mapping>(data).context("the remote profile data is invalid yaml")?;

if !yaml.contains_key("proxies") && !yaml.contains_key("proxy-providers") {
bail!("profile does not contain `proxies` or `proxy-providers`");
@@ -534,9 +520,7 @@ impl PrfItem {
.as_ref()
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
let path = dirs::app_profiles_dir()?.join(file.as_str());
let content = fs::read_to_string(path)
.await
.context("failed to read the file")?;
let content = fs::read_to_string(path).await.context("failed to read the file")?;
Ok(content.into())
}

@@ -45,10 +45,7 @@ macro_rules! patch {

impl IProfiles {
// Helper to find and remove an item by uid from the items vec, returning its file name (if any).
fn take_item_file_by_uid(
items: &mut Vec<PrfItem>,
target_uid: Option<String>,
) -> Option<String> {
fn take_item_file_by_uid(items: &mut Vec<PrfItem>, target_uid: Option<String>) -> Option<String> {
for (i, _) in items.iter().enumerate() {
if items[i].uid == target_uid {
return items.remove(i).file;
@@ -84,12 +81,7 @@ impl IProfiles {
}

pub async fn save_file(&self) -> Result<()> {
help::save_yaml(
&dirs::profiles_path()?,
self,
Some("# Profiles Config for Clash Verge"),
)
.await
help::save_yaml(&dirs::profiles_path()?, self, Some("# Profiles Config for Clash Verge")).await
}

/// 只修改current,valid和chain
@@ -159,9 +151,10 @@ impl IProfiles {
bail!("the file should not be null");
}

let file = item.file.clone().ok_or_else(|| {
anyhow::anyhow!("file field is required when file_data is provided")
})?;
let file = item
.file
.clone()
.ok_or_else(|| anyhow::anyhow!("file field is required when file_data is provided"))?;
let path = dirs::app_profiles_dir()?.join(file.as_str());

fs::write(&path, file_data.as_bytes())
@@ -169,9 +162,7 @@ impl IProfiles {
.with_context(|| format!("failed to write to file \"{file}\""))?;
}

if self.current.is_none()
&& (item.itype == Some("remote".into()) || item.itype == Some("local".into()))
{
if self.current.is_none() && (item.itype == Some("remote".into()) || item.itype == Some("local".into())) {
self.current = uid.to_owned();
}

@@ -259,11 +250,8 @@ impl IProfiles {
// move the field value after save
if let Some(file_data) = item.file_data.take() {
let file = each.file.take();
let file = file.unwrap_or_else(|| {
item.file
.take()
.unwrap_or_else(|| format!("{}.yaml", &uid).into())
});
let file =
file.unwrap_or_else(|| item.file.take().unwrap_or_else(|| format!("{}.yaml", &uid).into()));

// the file must exists
each.file = Some(file.clone());
@@ -298,42 +286,24 @@ impl IProfiles {

// remove the main item (if exists) and delete its file
if let Some(file) = Self::take_item_file_by_uid(&mut items, Some(uid.clone())) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}

// remove related extension items (merge, script, rules, proxies, groups)
if let Some(file) = Self::take_item_file_by_uid(&mut items, merge_uid.clone()) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, script_uid.clone()) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, rules_uid.clone()) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, proxies_uid.clone()) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, groups_uid.clone()) {
let _ = dirs::app_profiles_dir()?
.join(file.as_str())
.remove_if_exists()
.await;
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
// delete the original uid
if current == *uid {
@@ -381,11 +351,7 @@ impl IProfiles {
.filter_map(|e| {
if let (Some(uid), Some(name)) = (e.uid.as_ref(), e.name.as_ref()) {
let is_current = self.is_current_profile_index(uid);
let preview = IProfilePreview {
uid,
name,
is_current,
};
let preview = IProfilePreview { uid, name, is_current };
Some(preview)
} else {
None
@@ -458,11 +424,7 @@ impl IProfiles {
}
Err(e) => {
failed_deletions.push(format!("{file_name}: {e}").into());
logging!(
warn,
Type::Config,
"Warning: 清理文件失败: {file_name} - {e}"
);
logging!(warn, Type::Config, "Warning: 清理文件失败: {file_name} - {e}");
}
}
}
@@ -584,10 +546,7 @@ impl IProfiles {
// 特殊的Send-safe helper函数,完全避免跨await持有guard
use crate::config::Config;

pub async fn profiles_append_item_with_filedata_safe(
item: &PrfItem,
file_data: Option<String>,
) -> Result<()> {
pub async fn profiles_append_item_with_filedata_safe(item: &PrfItem, file_data: Option<String>) -> Result<()> {
let item = &mut PrfItem::from(item, file_data).await?;
profiles_append_item_safe(item).await
}

@@ -308,12 +308,7 @@ impl IVerge {

Self::reload_config_after_fix(config).await?;
} else {
logging!(
info,
Type::Config,
"clash_core配置验证通过: {:?}",
config.clash_core
);
logging!(info, Type::Config, "clash_core配置验证通过: {:?}", config.clash_core);
}

Ok(())
@@ -338,15 +333,11 @@ impl IVerge {
}

pub fn get_valid_clash_core(&self) -> String {
self.clash_core
.clone()
.unwrap_or_else(|| "verge-mihomo".into())
self.clash_core.clone().unwrap_or_else(|| "verge-mihomo".into())
}

fn get_system_language() -> String {
let sys_lang = sys_locale::get_locale()
.unwrap_or_else(|| "en".into())
.to_lowercase();
let sys_lang = sys_locale::get_locale().unwrap_or_else(|| "en".into()).to_lowercase();

let lang_code = sys_lang.split(['_', '-']).next().unwrap_or("en");
let supported_languages = i18n::get_supported_languages();

@@ -84,10 +84,7 @@ impl WebDavClient {
} else {
// 释放锁后获取异步配置
let verge = Config::verge().await.data_arc();
if verge.webdav_url.is_none()
|| verge.webdav_username.is_none()
|| verge.webdav_password.is_none()
{
if verge.webdav_url.is_none() || verge.webdav_username.is_none() || verge.webdav_password.is_none() {
let msg: String =
"Unable to create web dav client, please make sure the webdav config is correct".into();
return Err(anyhow::Error::msg(msg));
@@ -129,10 +126,7 @@ impl WebDavClient {
.build()?,
)
.set_host(config.url.into())
.set_auth(reqwest_dav::Auth::Basic(
config.username.into(),
config.password.into(),
))
.set_auth(reqwest_dav::Auth::Basic(config.username.into(), config.password.into()))
.build()?;

// 尝试检查目录是否存在,如果不存在尝试创建
@@ -144,18 +138,10 @@ impl WebDavClient {
match client.mkcol(dirs::BACKUP_DIR).await {
Ok(_) => logging!(info, Type::Backup, "Successfully created backup directory"),
Err(e) => {
logging!(
warn,
Type::Backup,
"Warning: Failed to create backup directory: {}",
e
);
logging!(warn, Type::Backup, "Warning: Failed to create backup directory: {}", e);
// 清除缓存,强制下次重新尝试
self.reset();
return Err(anyhow::Error::msg(format!(
"Failed to create backup directory: {}",
e
)));
return Err(anyhow::Error::msg(format!("Failed to create backup directory: {}", e)));
}
}
}
@@ -191,11 +177,7 @@ impl WebDavClient {

match upload_result {
Err(_) => {
logging!(
warn,
Type::Backup,
"Warning: Upload timed out, retrying once"
);
logging!(warn, Type::Backup, "Warning: Upload timed out, retrying once");
tokio::time::sleep(Duration::from_millis(500)).await;
timeout(
Duration::from_secs(TIMEOUT_UPLOAD),
@@ -206,11 +188,7 @@ impl WebDavClient {
}

Ok(Err(e)) => {
logging!(
warn,
Type::Backup,
"Warning: Upload failed, retrying once: {e}"
);
logging!(warn, Type::Backup, "Warning: Upload failed, retrying once: {e}");
tokio::time::sleep(Duration::from_millis(500)).await;
timeout(
Duration::from_secs(TIMEOUT_UPLOAD),
@@ -243,9 +221,7 @@ impl WebDavClient {
let path = format!("{}/", dirs::BACKUP_DIR);

let fut = async {
let files = client
.list(path.as_str(), reqwest_dav::Depth::Number(1))
.await?;
let files = client.list(path.as_str(), reqwest_dav::Depth::Number(1)).await?;
let mut final_files = Vec::new();
for file in files {
if let ListEntity::File(file) = file {

@@ -177,10 +177,7 @@ impl Hotkey {
"Failed to reactivate subscriptions via hotkey: {}",
err
);
handle::Handle::notice_message(
"reactivate_profiles::error",
err.to_string(),
);
handle::Handle::notice_message("reactivate_profiles::error", err.to_string());
}
});
}
@@ -205,8 +202,7 @@ impl Hotkey {
pub async fn register_system_hotkey(&self, hotkey: SystemHotkey) -> Result<()> {
let hotkey_str = hotkey.to_string();
let function = hotkey.function();
self.register_hotkey_with_function(&hotkey_str, function)
.await
self.register_hotkey_with_function(&hotkey_str, function).await
}

#[cfg(target_os = "macos")]
@@ -218,11 +214,7 @@ impl Hotkey {

/// Register a hotkey with function enum
#[allow(clippy::unused_async)]
pub async fn register_hotkey_with_function(
&self,
hotkey: &str,
function: HotkeyFunction,
) -> Result<()> {
pub async fn register_hotkey_with_function(&self, hotkey: &str, function: HotkeyFunction) -> Result<()> {
let app_handle = handle::Handle::app_handle();
let manager = app_handle.global_shortcut();

@@ -261,11 +253,8 @@ impl Hotkey {
AsyncHandler::spawn(move || async move {
logging!(debug, Type::Hotkey, "Executing function directly");

let is_enable_global_hotkey = Config::verge()
.await
.data_arc()
.enable_global_hotkey
.unwrap_or(true);
let is_enable_global_hotkey =
Config::verge().await.data_arc().enable_global_hotkey.unwrap_or(true);

if is_enable_global_hotkey {
Self::execute_function(function);
@@ -312,12 +301,7 @@ impl Hotkey {
let hotkeys = verge.data_arc().hotkeys.clone();

if let Some(hotkeys) = hotkeys {
logging!(
debug,
Type::Hotkey,
"Has {} hotkeys need to register",
hotkeys.len()
);
logging!(debug, Type::Hotkey, "Has {} hotkeys need to register", hotkeys.len());

for hotkey in hotkeys.iter() {
let mut iter = hotkey.split(',');
@@ -326,13 +310,7 @@ impl Hotkey {

match (key, func) {
(Some(key), Some(func)) => {
logging!(
debug,
Type::Hotkey,
"Registering hotkey: {} -> {}",
key,
func
);
logging!(debug, Type::Hotkey, "Registering hotkey: {} -> {}", key, func);
if let Err(e) = self.register(key, func).await {
logging!(
error,
@@ -465,12 +443,7 @@ impl Drop for Hotkey {
fn drop(&mut self) {
let app_handle = handle::Handle::app_handle();
if let Err(e) = app_handle.global_shortcut().unregister_all() {
logging!(
error,
Type::Hotkey,
"Error unregistering all hotkeys: {:?}",
e
);
logging!(error, Type::Hotkey, "Error unregistering all hotkeys: {:?}", e);
}
}
}

@@ -94,9 +94,6 @@ impl CoreManager {
}

async fn reload_config(&self, path: &str) -> Result<(), MihomoError> {
handle::Handle::mihomo()
.await
.reload_config(true, path)
.await
handle::Handle::mihomo().await.reload_config(true, path).await
}
}

@@ -78,10 +78,7 @@ impl CoreManager {

fn after_core_process(&self) {
let app_handle = Handle::app_handle();
tauri_plugin_clash_verge_sysinfo::set_app_core_mode(
app_handle,
self.get_running_mode().to_string(),
);
tauri_plugin_clash_verge_sysinfo::set_app_core_mode(app_handle, self.get_running_mode().to_string());
}

#[cfg(target_os = "windows")]
@@ -89,11 +86,7 @@ impl CoreManager {
use crate::{config::Config, constants::timing};
use backoff::{Error as BackoffError, ExponentialBackoff};

let needs_service = Config::verge()
.await
.latest_arc()
.enable_tun_mode
.unwrap_or(false);
let needs_service = Config::verge().await.latest_arc().enable_tun_mode.unwrap_or(false);

if !needs_service {
return;
@@ -121,9 +114,7 @@ impl CoreManager {
if matches!(manager.current(), ServiceStatus::Ready) {
Ok(())
} else {
Err(BackoffError::transient(anyhow::anyhow!(
"Service not ready"
)))
Err(BackoffError::transient(anyhow::anyhow!("Service not ready")))
}
};

@@ -54,8 +54,7 @@ impl CoreManager {
self.set_running_child_sidecar(child);
self.set_running_mode(RunningMode::Sidecar);

let shared_writer: SharedWriter =
std::sync::Arc::new(tokio::sync::Mutex::new(sidecar_writer().await?));
let shared_writer: SharedWriter = std::sync::Arc::new(tokio::sync::Mutex::new(sidecar_writer().await?));

AsyncHandler::spawn(|| async move {
while let Some(event) = rx.recv().await {
@@ -64,12 +63,7 @@ impl CoreManager {
| tauri_plugin_shell::process::CommandEvent::Stderr(line) => {
let mut now = DeferredNow::default();
let message = CompactString::from(String::from_utf8_lossy(&line).as_ref());
write_sidecar_log(
shared_writer.lock().await,
&mut now,
Level::Error,
&message,
);
write_sidecar_log(shared_writer.lock().await, &mut now, Level::Error, &message);
CLASH_LOGGER.append_log(message).await;
}
tauri_plugin_shell::process::CommandEvent::Terminated(term) => {
@@ -81,12 +75,7 @@ impl CoreManager {
} else {
CompactString::from("Process terminated")
};
write_sidecar_log(
shared_writer.lock().await,
&mut now,
Level::Info,
&message,
);
write_sidecar_log(shared_writer.lock().await, &mut now, Level::Info, &message);
CLASH_LOGGER.clear_logs().await;
break;
}

@@ -80,12 +80,7 @@ impl NotificationSystem {

match result {
Ok(handle) => self.worker_handle = Some(handle),
Err(e) => logging!(
error,
Type::System,
"Failed to start notification worker: {}",
e
),
Err(e) => logging!(error, Type::System, "Failed to start notification worker: {}", e),
}
}

@@ -148,31 +143,19 @@ impl NotificationSystem {
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_event(
|
||||
&self,
|
||||
event: FrontendEvent,
|
||||
) -> (&'static str, Result<serde_json::Value, serde_json::Error>) {
|
||||
fn serialize_event(&self, event: FrontendEvent) -> (&'static str, Result<serde_json::Value, serde_json::Error>) {
|
||||
use serde_json::json;
|
||||
|
||||
match event {
|
||||
FrontendEvent::RefreshClash => ("verge://refresh-clash-config", Ok(json!("yes"))),
|
||||
FrontendEvent::RefreshVerge => ("verge://refresh-verge-config", Ok(json!("yes"))),
|
||||
FrontendEvent::NoticeMessage { status, message } => (
|
||||
"verge://notice-message",
|
||||
serde_json::to_value((status, message)),
|
||||
),
|
||||
FrontendEvent::ProfileChanged { current_profile_id } => {
|
||||
("profile-changed", Ok(json!(current_profile_id)))
|
||||
}
|
||||
FrontendEvent::TimerUpdated { profile_index } => {
|
||||
("verge://timer-updated", Ok(json!(profile_index)))
|
||||
}
|
||||
FrontendEvent::ProfileUpdateStarted { uid } => {
|
||||
("profile-update-started", Ok(json!({ "uid": uid })))
|
||||
}
|
||||
FrontendEvent::ProfileUpdateCompleted { uid } => {
|
||||
("profile-update-completed", Ok(json!({ "uid": uid })))
|
||||
FrontendEvent::NoticeMessage { status, message } => {
|
||||
("verge://notice-message", serde_json::to_value((status, message)))
|
||||
}
|
||||
FrontendEvent::ProfileChanged { current_profile_id } => ("profile-changed", Ok(json!(current_profile_id))),
|
||||
FrontendEvent::TimerUpdated { profile_index } => ("verge://timer-updated", Ok(json!(profile_index))),
|
||||
FrontendEvent::ProfileUpdateStarted { uid } => ("profile-update-started", Ok(json!({ "uid": uid }))),
|
||||
FrontendEvent::ProfileUpdateCompleted { uid } => ("profile-update-completed", Ok(json!({ "uid": uid }))),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -182,12 +165,7 @@ impl NotificationSystem {

let errors = self.stats.total_errors.load(Ordering::Relaxed);
if errors > retry::EVENT_EMIT_THRESHOLD && !self.emergency_mode.load(Ordering::Acquire) {
logging!(
warn,
Type::Frontend,
"Entering emergency mode after {} errors",
errors
);
logging!(warn, Type::Frontend, "Entering emergency mode after {} errors", errors);
self.emergency_mode.store(true, Ordering::Release);
}
}

@@ -50,9 +50,7 @@ async fn uninstall_service() -> Result<()> {
let level = token.privilege_level()?;
let status = match level {
PrivilegeLevel::NotPrivileged => RunasCommand::new(uninstall_path).show(false).status()?,
_ => StdCommand::new(uninstall_path)
.creation_flags(0x08000000)
.status()?,
_ => StdCommand::new(uninstall_path).creation_flags(0x08000000).status()?,
};

if !status.success() {
@@ -85,16 +83,11 @@ async fn install_service() -> Result<()> {
let level = token.privilege_level()?;
let status = match level {
PrivilegeLevel::NotPrivileged => RunasCommand::new(install_path).show(false).status()?,
_ => StdCommand::new(install_path)
.creation_flags(0x08000000)
.status()?,
_ => StdCommand::new(install_path).creation_flags(0x08000000).status()?,
};

if !status.success() {
bail!(
"failed to install service with status {}",
status.code().unwrap_or(-1)
);
bail!("failed to install service with status {}", status.code().unwrap_or(-1));
}

Ok(())
@@ -123,8 +116,7 @@ async fn reinstall_service() -> Result<()> {
async fn uninstall_service() -> Result<()> {
logging!(info, Type::Service, "uninstall service");

let uninstall_path =
tauri::utils::platform::current_exe()?.with_file_name("clash-verge-service-uninstall");
let uninstall_path = tauri::utils::platform::current_exe()?.with_file_name("clash-verge-service-uninstall");

if !uninstall_path.exists() {
bail!(format!("uninstaller not found: {uninstall_path:?}"));
@@ -181,8 +173,7 @@ async fn uninstall_service() -> Result<()> {
async fn install_service() -> Result<()> {
logging!(info, Type::Service, "install service");

let install_path =
tauri::utils::platform::current_exe()?.with_file_name("clash-verge-service-install");
let install_path = tauri::utils::platform::current_exe()?.with_file_name("clash-verge-service-install");

if !install_path.exists() {
bail!(format!("installer not found: {install_path:?}"));
@@ -225,10 +216,7 @@ async fn install_service() -> Result<()> {
);

if !status.success() {
bail!(
"failed to install service with status {}",
status.code().unwrap_or(-1)
);
bail!("failed to install service with status {}", status.code().unwrap_or(-1));
}

Ok(())
@@ -276,15 +264,12 @@ async fn uninstall_service() -> Result<()> {
crate::utils::i18n::sync_locale().await;

let prompt = rust_i18n::t!("service.adminUninstallPrompt").to_string();
let command = format!(
r#"do shell script "sudo '{uninstall_shell}'" with administrator privileges with prompt "{prompt}""#
);
let command =
format!(r#"do shell script "sudo '{uninstall_shell}'" with administrator privileges with prompt "{prompt}""#);

// logging!(debug, Type::Service, "uninstall command: {}", command);

let status = StdCommand::new("osascript")
.args(vec!["-e", &command])
.status()?;
let status = StdCommand::new("osascript").args(vec!["-e", &command]).status()?;

if !status.success() {
bail!(
@@ -312,19 +297,13 @@ async fn install_service() -> Result<()> {
crate::utils::i18n::sync_locale().await;

let prompt = rust_i18n::t!("service.adminInstallPrompt").to_string();
let command = format!(
r#"do shell script "sudo '{install_shell}'" with administrator privileges with prompt "{prompt}""#
);
let command =
format!(r#"do shell script "sudo '{install_shell}'" with administrator privileges with prompt "{prompt}""#);

let status = StdCommand::new("osascript")
.args(vec!["-e", &command])
.status()?;
let status = StdCommand::new("osascript").args(vec!["-e", &command]).status()?;

if !status.success() {
bail!(
"failed to install service with status {}",
status.code().unwrap_or(-1)
);
bail!("failed to install service with status {}", status.code().unwrap_or(-1));
}

Ok(())
@@ -362,19 +341,13 @@ async fn check_service_version() -> Result<String> {
let version_arc: Result<String> = {
logging!(info, Type::Service, "开始检查服务版本 (IPC)");
let result = clash_verge_service_ipc::get_version().await;
logging!(
debug,
Type::Service,
"检查服务版本 (IPC) 结果: {:?}",
result
);
logging!(debug, Type::Service, "检查服务版本 (IPC) 结果: {:?}", result);

// 检查错误信息是否是JSON序列化错误或预期值错误,以适配老版本服务
// 这可能是因为老版本服务的API不兼容,导致无法正确解析响应
// 如果是这种情况,直接返回空字符串,表示无法获取版本
if let Err(e) = result.as_ref()
&& (e.to_string().contains("JSON serialization error")
|| e.to_string().contains("expected value"))
&& (e.to_string().contains("JSON serialization error") || e.to_string().contains("expected value"))
{
logging!(
warn,
@@ -468,12 +441,7 @@ pub(super) async fn get_clash_logs_by_service() -> Result<Vec<CompactString>> {

if response.code > 0 {
let err_msg = response.message;
logging!(
error,
Type::Service,
"获取服务模式下的 Clash 日志失败: {}",
err_msg
);
logging!(error, Type::Service, "获取服务模式下的 Clash 日志失败: {}", err_msg);
bail!(err_msg);
}
@@ -594,12 +562,7 @@ impl ServiceManager {
self.0 = ServiceStatus::Unavailable("Service Uninstalled".into());
}
ServiceStatus::Unavailable(reason) => {
logging!(
info,
Type::Service,
"服务不可用: {},将使用Sidecar模式",
reason
);
logging!(info, Type::Service, "服务不可用: {},将使用Sidecar模式", reason);
self.0 = ServiceStatus::Unavailable(reason.clone());
return Err(anyhow::anyhow!("服务不可用: {}", reason));
}
@@ -609,5 +572,4 @@ impl ServiceManager {
}
}

pub static SERVICE_MANAGER: Lazy<Mutex<ServiceManager>> =
Lazy::new(|| Mutex::new(ServiceManager::default()));
pub static SERVICE_MANAGER: Lazy<Mutex<ServiceManager>> = Lazy::new(|| Mutex::new(ServiceManager::default()));
@@ -35,10 +35,7 @@ impl Default for Sysopt {
reset_sysproxy: AtomicBool::new(false),
#[cfg(not(target_os = "windows"))]
inner_proxy: Arc::new(RwLock::new((Sysproxy::default(), Autoproxy::default()))),
guard: Arc::new(RwLock::new(GuardMonitor::new(
GuardType::None,
Duration::from_secs(30),
))),
guard: Arc::new(RwLock::new(GuardMonitor::new(GuardType::None, Duration::from_secs(30)))),
}
}
}
@@ -52,11 +49,7 @@ static DEFAULT_BYPASS: &str =
"127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,localhost,*.local,*.crashlytics.com,<local>";

async fn get_bypass() -> String {
let use_default = Config::verge()
.await
.latest_arc()
.use_default_bypass
.unwrap_or(true);
let use_default = Config::verge().await.latest_arc().use_default_bypass.unwrap_or(true);
let res = {
let verge = Config::verge().await;
let verge = verge.latest_arc();
@@ -136,9 +129,9 @@ impl Sysopt {
);
{
let guard = self.access_guard();
guard.write().set_interval(Duration::from_secs(
verge.proxy_guard_duration.unwrap_or(30),
));
guard
.write()
.set_interval(Duration::from_secs(verge.proxy_guard_duration.unwrap_or(30)));
}
logging!(info, Type::Core, "Starting system proxy guard...");
{
@@ -180,10 +173,7 @@ impl Sysopt {
(
verge.enable_system_proxy.unwrap_or(false),
verge.proxy_auto_config.unwrap_or(false),
verge
.proxy_host
.clone()
.unwrap_or_else(|| String::from("127.0.0.1")),
verge.proxy_host.clone().unwrap_or_else(|| String::from("127.0.0.1")),
)
};
@@ -306,12 +296,7 @@ impl Sysopt {
pub async fn update_launch(&self) -> Result<()> {
let enable_auto_launch = { Config::verge().await.latest_arc().enable_auto_launch };
let is_enable = enable_auto_launch.unwrap_or(false);
logging!(
info,
Type::System,
"Setting auto-launch state to: {:?}",
is_enable
);
logging!(info, Type::System, "Setting auto-launch state to: {:?}", is_enable);

// 首先尝试使用快捷方式方法
#[cfg(target_os = "windows")]
@@ -75,12 +75,7 @@ impl Timer {
// Log timer info first
{
let timer_map = self.timer_map.read();
logging!(
info,
Type::Timer,
"已注册的定时任务数量: {}",
timer_map.len()
);
logging!(info, Type::Timer, "已注册的定时任务数量: {}", timer_map.len());

for (uid, task) in timer_map.iter() {
logging!(
@@ -97,32 +92,30 @@ impl Timer {
let cur_timestamp = chrono::Local::now().timestamp();

// Collect profiles that need immediate update
let profiles_to_update =
if let Some(items) = Config::profiles().await.latest_arc().get_items() {
items
.iter()
.filter_map(|item| {
let allow_auto_update =
item.option.as_ref()?.allow_auto_update.unwrap_or_default();
if !allow_auto_update {
return None;
}
let profiles_to_update = if let Some(items) = Config::profiles().await.latest_arc().get_items() {
items
.iter()
.filter_map(|item| {
let allow_auto_update = item.option.as_ref()?.allow_auto_update.unwrap_or_default();
if !allow_auto_update {
return None;
}

let interval = item.option.as_ref()?.update_interval? as i64;
let updated = item.updated? as i64;
let uid = item.uid.as_ref()?;
let interval = item.option.as_ref()?.update_interval? as i64;
let updated = item.updated? as i64;
let uid = item.uid.as_ref()?;

if interval > 0 && cur_timestamp - updated >= interval * 60 {
logging!(info, Type::Timer, "需要立即更新的配置: uid={}", uid);
Some(uid.clone())
} else {
None
}
})
.collect::<Vec<String>>()
} else {
Vec::new()
};
if interval > 0 && cur_timestamp - updated >= interval * 60 {
logging!(info, Type::Timer, "需要立即更新的配置: uid={}", uid);
Some(uid.clone())
} else {
None
}
})
.collect::<Vec<String>>()
} else {
Vec::new()
};

// Advance tasks outside of locks to minimize lock contention
if !profiles_to_update.is_empty() {
@@ -178,12 +171,7 @@ impl Timer {
return Ok(());
}

logging!(
info,
Type::Timer,
"Refreshing {} timer tasks",
diff_map.len()
);
logging!(info, Type::Timer, "Refreshing {} timer tasks", diff_map.len());

// Apply changes - first collect operations to perform without holding locks
let mut operations_to_add: Vec<(String, TaskID, u64)> = Vec::new();
@@ -286,12 +274,7 @@ impl Timer {
}
}

logging!(
debug,
Type::Timer,
"生成的定时更新配置数量: {}",
new_map.len()
);
logging!(debug, Type::Timer, "生成的定时更新配置数量: {}", new_map.len());
new_map
}

@@ -302,12 +285,7 @@ impl Timer {

// Read lock for comparing current state
let timer_map = self.timer_map.read();
logging!(
debug,
Type::Timer,
"当前 timer_map 大小: {}",
timer_map.len()
);
logging!(debug, Type::Timer, "当前 timer_map 大小: {}", timer_map.len());

// Find tasks to modify or delete
for (uid, task) in timer_map.iter() {
@@ -364,13 +342,7 @@ impl Timer {
}

/// Add a timer task with better error handling
fn add_task(
&self,
delay_timer: &DelayTimer,
uid: String,
tid: TaskID,
minutes: u64,
) -> Result<()> {
fn add_task(&self, delay_timer: &DelayTimer, uid: String, tid: TaskID, minutes: u64) -> Result<()> {
logging!(
info,
Type::Timer,
@@ -394,9 +366,7 @@ impl Timer {
})
.context("failed to create timer task")?;

delay_timer
.add_task(task)
.context("failed to add timer task")?;
delay_timer.add_task(task).context("failed to add timer task")?;

Ok(())
}
@@ -443,13 +413,7 @@ impl Timer {
// Calculate next update time
if updated > 0 && task_interval > 0 {
let next_time = updated + (task_interval as i64 * 60);
logging!(
info,
Type::Timer,
"计算得到下次更新时间: {}, uid={}",
next_time,
uid
);
logging!(info, Type::Timer, "计算得到下次更新时间: {}, uid={}", next_time, uid);
Some(next_time)
} else {
logging!(
@@ -483,13 +447,7 @@ impl Timer {
Self::emit_update_event(uid, true);

let is_current = Config::profiles().await.latest_arc().current.as_ref() == Some(uid);
logging!(
info,
Type::Timer,
"配置 {} 是否为当前激活配置: {}",
uid,
is_current
);
logging!(info, Type::Timer, "配置 {} 是否为当前激活配置: {}", uid, is_current);

feat::update_profile(uid, None, is_current, false).await
})
@@ -93,29 +93,17 @@ impl TrayState {
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let tray_icon_colorful = verge
|
||||
.tray_icon
|
||||
.clone()
|
||||
.unwrap_or_else(|| "monochrome".into());
|
||||
let tray_icon_colorful = verge.tray_icon.clone().unwrap_or_else(|| "monochrome".into());
|
||||
if tray_icon_colorful == "monochrome" {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-mono.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon-mono.ico").to_vec())
|
||||
} else {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon.ico").to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon.ico").to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,29 +117,20 @@ impl TrayState {
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let tray_icon_colorful = verge
|
||||
.tray_icon
|
||||
.clone()
|
||||
.unwrap_or_else(|| "monochrome".into());
|
||||
let tray_icon_colorful = verge.tray_icon.clone().unwrap_or_else(|| "monochrome".into());
|
||||
if tray_icon_colorful == "monochrome" {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-sys-mono-new.ico").to_vec(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-sys.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon-sys.ico").to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-sys.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon-sys.ico").to_vec())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,28 +144,19 @@ impl TrayState {
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let tray_icon_colorful = verge
|
||||
.tray_icon
|
||||
.clone()
|
||||
.unwrap_or_else(|| "monochrome".into());
|
||||
let tray_icon_colorful = verge.tray_icon.clone().unwrap_or_else(|| "monochrome".into());
|
||||
if tray_icon_colorful == "monochrome" {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-tun-mono-new.ico").to_vec(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-tun.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon-tun.ico").to_vec())
|
||||
}
|
||||
}
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
(
|
||||
false,
|
||||
include_bytes!("../../../icons/tray-icon-tun.ico").to_vec(),
|
||||
)
|
||||
(false, include_bytes!("../../../icons/tray-icon-tun.ico").to_vec())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -304,8 +274,8 @@ impl Tray {
|
||||
let verge = Config::verge().await.latest_arc();
|
||||
let system_proxy = verge.enable_system_proxy.as_ref().unwrap_or(&false);
|
||||
let tun_mode = verge.enable_tun_mode.as_ref().unwrap_or(&false);
|
||||
let tun_mode_available = is_current_app_handle_admin(app_handle)
|
||||
|| service::is_service_available().await.is_ok();
|
||||
let tun_mode_available =
|
||||
is_current_app_handle_admin(app_handle) || service::is_service_available().await.is_ok();
|
||||
let mode = {
|
||||
Config::clash()
|
||||
.await
|
||||
@@ -339,11 +309,7 @@ impl Tray {
|
||||
Ok(())
|
||||
}
|
||||
None => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Tray,
|
||||
"Failed to update tray menu: tray not found"
|
||||
);
|
||||
logging!(warn, Type::Tray, "Failed to update tray menu: tray not found");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -362,11 +328,7 @@ impl Tray {
|
||||
let tray = match app_handle.tray_by_id("main") {
|
||||
Some(tray) => tray,
|
||||
None => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Tray,
|
||||
"Failed to update tray icon: tray not found"
|
||||
);
|
||||
logging!(warn, Type::Tray, "Failed to update tray icon: tray not found");
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
@@ -381,10 +343,7 @@ impl Tray {
|
||||
(false, false) => TrayState::get_common_tray_icon(verge).await,
|
||||
};
|
||||
|
||||
let colorful = verge
|
||||
.tray_icon
|
||||
.clone()
|
||||
.unwrap_or_else(|| "monochrome".into());
|
||||
let colorful = verge.tray_icon.clone().unwrap_or_else(|| "monochrome".into());
|
||||
let is_colorful = colorful == "colorful";
|
||||
|
||||
let _ = tray.set_icon(Some(tauri::image::Image::from_bytes(&icon_bytes)?));
|
||||
@@ -404,11 +363,7 @@ impl Tray {
|
||||
let tray = match app_handle.tray_by_id("main") {
|
||||
Some(tray) => tray,
|
||||
None => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Tray,
|
||||
"Failed to update tray icon: tray not found"
|
||||
);
|
||||
logging!(warn, Type::Tray, "Failed to update tray icon: tray not found");
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
@@ -488,11 +443,7 @@ impl Tray {
|
||||
if let Some(tray) = app_handle.tray_by_id("main") {
|
||||
let _ = tray.set_tooltip(Some(&tooltip));
|
||||
} else {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Tray,
|
||||
"Failed to update tray tooltip: tray not found"
|
||||
);
|
||||
logging!(warn, Type::Tray, "Failed to update tray tooltip: tray not found");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -525,9 +476,7 @@ impl Tray {
|
||||
let icon = tauri::image::Image::from_bytes(&icon_bytes)?;
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let builder = TrayIconBuilder::with_id("main")
|
||||
.icon(icon)
|
||||
.icon_as_template(false);
|
||||
let builder = TrayIconBuilder::with_id("main").icon(icon).icon_as_template(false);
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
let show_menu_on_left_click = {
|
||||
@@ -538,9 +487,7 @@ impl Tray {
|
||||
};
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
let mut builder = TrayIconBuilder::with_id("main")
|
||||
.icon(icon)
|
||||
.icon_as_template(false);
|
||||
let mut builder = TrayIconBuilder::with_id("main").icon(icon).icon_as_template(false);
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
{
|
||||
@@ -679,18 +626,9 @@ fn create_subcreate_proxy_menu_item(
|
||||
|
||||
let display_text = format!("{} | {}", proxy_str, delay_text);
|
||||
|
||||
CheckMenuItem::with_id(
|
||||
app_handle,
|
||||
item_id,
|
||||
display_text,
|
||||
true,
|
||||
is_selected,
|
||||
None::<&str>,
|
||||
)
|
||||
.map_err(|e| {
|
||||
logging!(warn, Type::Tray, "Failed to create proxy menu item: {}", e)
|
||||
})
|
||||
.ok()
|
||||
CheckMenuItem::with_id(app_handle, item_id, display_text, true, is_selected, None::<&str>)
|
||||
.map_err(|e| logging!(warn, Type::Tray, "Failed to create proxy menu item: {}", e))
|
||||
.ok()
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -700,10 +638,8 @@ fn create_subcreate_proxy_menu_item(
|
||||
|
||||
let group_display_name = group_name.to_string();
|
||||
|
||||
let group_items_refs: Vec<&dyn IsMenuItem<Wry>> = group_items
|
||||
.iter()
|
||||
.map(|item| item as &dyn IsMenuItem<Wry>)
|
||||
.collect();
|
||||
let group_items_refs: Vec<&dyn IsMenuItem<Wry>> =
|
||||
group_items.iter().map(|item| item as &dyn IsMenuItem<Wry>).collect();
|
||||
|
||||
if let Ok(submenu) = Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
@@ -715,34 +651,23 @@ fn create_subcreate_proxy_menu_item(
|
||||
let insertion_index = submenus.len();
|
||||
submenus.push((group_name.into(), insertion_index, submenu));
|
||||
} else {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Tray,
|
||||
"Failed to create proxy group submenu: {}",
|
||||
group_name
|
||||
);
|
||||
logging!(warn, Type::Tray, "Failed to create proxy group submenu: {}", group_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(order_map) = proxy_group_order_map.as_ref() {
|
||||
submenus.sort_by(
|
||||
|(name_a, original_index_a, _), (name_b, original_index_b, _)| match (
|
||||
order_map.get(name_a),
|
||||
order_map.get(name_b),
|
||||
) {
|
||||
submenus.sort_by(|(name_a, original_index_a, _), (name_b, original_index_b, _)| {
|
||||
match (order_map.get(name_a), order_map.get(name_b)) {
|
||||
(Some(index_a), Some(index_b)) => index_a.cmp(index_b),
|
||||
(Some(_), None) => std::cmp::Ordering::Less,
|
||||
(None, Some(_)) => std::cmp::Ordering::Greater,
|
||||
(None, None) => original_index_a.cmp(original_index_b),
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
submenus
|
||||
.into_iter()
|
||||
.map(|(_, _, submenu)| submenu)
|
||||
.collect()
|
||||
submenus.into_iter().map(|(_, _, submenu)| submenu).collect()
|
||||
};
|
||||
proxy_submenus
|
||||
}
|
||||
@@ -825,15 +750,14 @@ async fn create_tray_menu(
|
||||
.unwrap_or_default()
|
||||
});
|
||||
|
||||
let proxy_group_order_map: Option<
|
||||
HashMap<smartstring::SmartString<smartstring::LazyCompact>, usize>,
|
||||
> = runtime_proxy_groups_order.as_ref().map(|group_names| {
|
||||
group_names
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, name)| (name.clone(), index))
|
||||
.collect::<HashMap<String, usize>>()
|
||||
});
|
||||
let proxy_group_order_map: Option<HashMap<smartstring::SmartString<smartstring::LazyCompact>, usize>> =
|
||||
runtime_proxy_groups_order.as_ref().map(|group_names| {
|
||||
group_names
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, name)| (name.clone(), index))
|
||||
.collect::<HashMap<String, usize>>()
|
||||
});
|
||||
|
||||
let verge_settings = Config::verge().await.latest_arc();
|
||||
let show_proxy_groups_inline = verge_settings.tray_inline_proxy_groups.unwrap_or(true);
|
||||
@@ -842,8 +766,7 @@ async fn create_tray_menu(
|
||||
|
||||
let hotkeys = create_hotkeys(&verge_settings.hotkeys);
|
||||
|
||||
let profile_menu_items: Vec<CheckMenuItem<Wry>> =
|
||||
create_profile_menu_item(app_handle, profiles_preview)?;
|
||||
let profile_menu_items: Vec<CheckMenuItem<Wry>> = create_profile_menu_item(app_handle, profiles_preview)?;
|
||||
|
||||
// Pre-fetch all localized strings
|
||||
let texts = MenuTexts::new();
|
||||
@@ -915,19 +838,11 @@ async fn create_tray_menu(
|
||||
&profile_menu_items_refs,
|
||||
)?;
|
||||
|
||||
let proxy_sub_menus = create_subcreate_proxy_menu_item(
|
||||
app_handle,
|
||||
current_proxy_mode,
|
||||
proxy_group_order_map,
|
||||
proxy_nodes_data,
|
||||
);
|
||||
let proxy_sub_menus =
|
||||
create_subcreate_proxy_menu_item(app_handle, current_proxy_mode, proxy_group_order_map, proxy_nodes_data);
|
||||
|
||||
let (proxies_menu, inline_proxy_items) = create_proxy_menu_item(
|
||||
app_handle,
|
||||
show_proxy_groups_inline,
|
||||
proxy_sub_menus,
|
||||
&texts.proxies,
|
||||
)?;
|
||||
let (proxies_menu, inline_proxy_items) =
|
||||
create_proxy_menu_item(app_handle, show_proxy_groups_inline, proxy_sub_menus, &texts.proxies)?;
|
||||
|
||||
let system_proxy = &CheckMenuItem::with_id(
|
||||
app_handle,
|
||||
@@ -964,66 +879,24 @@ async fn create_tray_menu(
|
||||
hotkeys.get("entry_lightweight_mode").map(|s| s.as_str()),
|
||||
)?;
|
||||
|
||||
let copy_env = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::COPY_ENV,
|
||||
&texts.copy_env,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let copy_env = &MenuItem::with_id(app_handle, MenuIds::COPY_ENV, &texts.copy_env, true, None::<&str>)?;
|
||||
|
||||
let open_app_dir = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::CONF_DIR,
|
||||
&texts.conf_dir,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let open_app_dir = &MenuItem::with_id(app_handle, MenuIds::CONF_DIR, &texts.conf_dir, true, None::<&str>)?;
|
||||
|
||||
let open_core_dir = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::CORE_DIR,
|
||||
&texts.core_dir,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let open_core_dir = &MenuItem::with_id(app_handle, MenuIds::CORE_DIR, &texts.core_dir, true, None::<&str>)?;
|
||||
|
||||
let open_logs_dir = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::LOGS_DIR,
|
||||
&texts.logs_dir,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let open_logs_dir = &MenuItem::with_id(app_handle, MenuIds::LOGS_DIR, &texts.logs_dir, true, None::<&str>)?;
|
||||
|
||||
let open_app_log = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::APP_LOG,
|
||||
&texts.app_log,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let open_app_log = &MenuItem::with_id(app_handle, MenuIds::APP_LOG, &texts.app_log, true, None::<&str>)?;
|
||||
|
||||
let open_core_log = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::CORE_LOG,
|
||||
&texts.core_log,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let open_core_log = &MenuItem::with_id(app_handle, MenuIds::CORE_LOG, &texts.core_log, true, None::<&str>)?;
|
||||
|
||||
let open_dir = &Submenu::with_id_and_items(
|
||||
app_handle,
|
||||
MenuIds::OPEN_DIR,
|
||||
&texts.open_dir,
|
||||
true,
|
||||
&[
|
||||
open_app_dir,
|
||||
open_core_dir,
|
||||
open_logs_dir,
|
||||
open_app_log,
|
||||
open_core_log,
|
||||
],
|
||||
&[open_app_dir, open_core_dir, open_logs_dir, open_app_log, open_core_log],
|
||||
)?;
|
||||
|
||||
let restart_clash = &MenuItem::with_id(
|
||||
@@ -1034,13 +907,7 @@ async fn create_tray_menu(
|
||||
None::<&str>,
|
||||
)?;
|
||||
|
||||
let restart_app = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::RESTART_APP,
|
||||
&texts.restart_app,
|
||||
true,
|
||||
None::<&str>,
|
||||
)?;
|
||||
let restart_app = &MenuItem::with_id(app_handle, MenuIds::RESTART_APP, &texts.restart_app, true, None::<&str>)?;
|
||||
|
||||
let app_version = &MenuItem::with_id(
|
||||
app_handle,
|
||||
@@ -1064,19 +931,12 @@ async fn create_tray_menu(
|
||||
],
|
||||
)?;
|
||||
|
||||
let quit = &MenuItem::with_id(
|
||||
app_handle,
|
||||
MenuIds::EXIT,
|
||||
&texts.exit,
|
||||
true,
|
||||
Some("CmdOrControl+Q"),
|
||||
)?;
|
||||
let quit = &MenuItem::with_id(app_handle, MenuIds::EXIT, &texts.exit, true, Some("CmdOrControl+Q"))?;
|
||||
|
||||
let separator = &PredefinedMenuItem::separator(app_handle)?;
|
||||
|
||||
// 动态构建菜单项
|
||||
let mut menu_items: Vec<&dyn IsMenuItem<Wry>> =
|
||||
vec![open_window, outbound_modes, separator, profiles];
|
||||
let mut menu_items: Vec<&dyn IsMenuItem<Wry>> = vec![open_window, outbound_modes, separator, profiles];
|
||||
|
||||
// 如果有代理节点,添加代理节点菜单
|
||||
if show_proxy_groups_inline {
|
||||
@@ -1099,9 +959,7 @@ async fn create_tray_menu(
|
||||
quit as &dyn IsMenuItem<Wry>,
|
||||
]);
|
||||
|
||||
let menu = tauri::menu::MenuBuilder::new(app_handle)
|
||||
.items(&menu_items)
|
||||
.build()?;
|
||||
let menu = tauri::menu::MenuBuilder::new(app_handle).items(&menu_items).build()?;
|
||||
Ok(menu)
|
||||
}
|
||||
|
||||
@@ -1132,11 +990,7 @@ fn on_menu_event(_: &AppHandle, event: MenuEvent) {
|
||||
}
|
||||
MenuIds::CLOSE_ALL_CONNECTIONS => {
|
||||
if let Err(err) = handle::Handle::mihomo().await.close_all_connections().await {
|
||||
logging!(
|
||||
error,
|
||||
Type::Tray,
|
||||
"Failed to close all connections from tray: {err}"
|
||||
);
|
||||
logging!(error, Type::Tray, "Failed to close all connections from tray: {err}");
|
||||
}
|
||||
}
|
||||
MenuIds::COPY_ENV => feat::copy_clash_env().await,
|
||||
@@ -1188,12 +1042,7 @@ fn on_menu_event(_: &AppHandle, event: MenuEvent) {
|
||||
feat::switch_proxy_node(group_name, proxy_name).await;
|
||||
}
|
||||
_ => {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Tray,
|
||||
"Unhandled tray menu event: {:?}",
|
||||
event.id
|
||||
);
|
||||
logging!(debug, Type::Tray, "Unhandled tray menu event: {:?}", event.id);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -45,17 +45,8 @@ impl CoreConfigValidator {
|
||||
let content = match fs::read_to_string(path).await {
|
||||
Ok(content) => content,
|
||||
Err(err) => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Validate,
|
||||
"无法读取文件以检测类型: {}, 错误: {}",
|
||||
path,
|
||||
err
|
||||
);
|
||||
return Err(anyhow::anyhow!(
|
||||
"Failed to read file to detect type: {}",
|
||||
err
|
||||
));
|
||||
logging!(warn, Type::Validate, "无法读取文件以检测类型: {}, 错误: {}", path, err);
|
||||
return Err(anyhow::anyhow!("Failed to read file to detect type: {}", err));
|
||||
}
|
||||
};
|
||||
|
||||
@@ -102,12 +93,7 @@ impl CoreConfigValidator {
|
||||
}
|
||||
|
||||
// 默认情况:无法确定时,假设为非脚本文件(更安全)
|
||||
logging!(
|
||||
debug,
|
||||
Type::Validate,
|
||||
"无法确定文件类型,默认当作YAML处理: {}",
|
||||
path
|
||||
);
|
||||
logging!(debug, Type::Validate, "无法确定文件类型,默认当作YAML处理: {}", path);
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
@@ -188,10 +174,7 @@ impl CoreConfigValidator {
|
||||
}
|
||||
|
||||
/// 验证指定的配置文件
|
||||
pub async fn validate_config_file(
|
||||
config_path: &str,
|
||||
is_merge_file: Option<bool>,
|
||||
) -> Result<(bool, String)> {
|
||||
pub async fn validate_config_file(config_path: &str, is_merge_file: Option<bool>) -> Result<(bool, String)> {
|
||||
// 检查程序是否正在退出,如果是则跳过验证
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(info, Type::Core, "应用正在退出,跳过验证");
|
||||
@@ -207,12 +190,7 @@ impl CoreConfigValidator {
|
||||
|
||||
// 如果是合并文件且不是强制验证,执行语法检查但不进行完整验证
|
||||
if is_merge_file.unwrap_or(false) {
|
||||
logging!(
|
||||
info,
|
||||
Type::Validate,
|
||||
"检测到Merge文件,仅进行语法检查: {}",
|
||||
config_path
|
||||
);
|
||||
logging!(info, Type::Validate, "检测到Merge文件,仅进行语法检查: {}", config_path);
|
||||
return Self::validate_file_syntax(config_path).await;
|
||||
}
|
||||
|
||||
@@ -224,13 +202,7 @@ impl CoreConfigValidator {
|
||||
Ok(result) => result,
|
||||
Err(err) => {
|
||||
// 如果无法确定文件类型,尝试使用Clash内核验证
|
||||
logging!(
|
||||
warn,
|
||||
Type::Validate,
|
||||
"无法确定文件类型: {}, 错误: {}",
|
||||
config_path,
|
||||
err
|
||||
);
|
||||
logging!(warn, Type::Validate, "无法确定文件类型: {}, 错误: {}", config_path, err);
|
||||
return Self::validate_config_internal(config_path).await;
|
||||
}
|
||||
}
|
||||
@@ -247,12 +219,7 @@ impl CoreConfigValidator {
|
||||
}
|
||||
|
||||
// 对YAML配置文件使用Clash内核验证
|
||||
logging!(
|
||||
info,
|
||||
Type::Validate,
|
||||
"使用Clash内核验证配置文件: {}",
|
||||
config_path
|
||||
);
|
||||
logging!(info, Type::Validate, "使用Clash内核验证配置文件: {}", config_path);
|
||||
Self::validate_config_internal(config_path).await
|
||||
}
|
||||
|
||||
@@ -275,13 +242,11 @@ impl CoreConfigValidator {
|
||||
logging!(info, Type::Validate, "验证目录: {}", app_dir_str);
|
||||
|
||||
// 使用子进程运行clash验证配置
|
||||
let command = app_handle.shell().sidecar(clash_core.as_str())?.args([
|
||||
"-t",
|
||||
"-d",
|
||||
app_dir_str,
|
||||
"-f",
|
||||
config_path,
|
||||
]);
|
||||
let command =
|
||||
app_handle
|
||||
.shell()
|
||||
.sidecar(clash_core.as_str())?
|
||||
.args(["-t", "-d", app_dir_str, "-f", config_path]);
|
||||
let output = command.output().await?;
|
||||
|
||||
let status = &output.status;
|
||||
|
||||
@@ -118,19 +118,16 @@ impl ChainItem {
|
||||
/// 内建支持一些脚本
|
||||
pub fn builtin() -> Vec<(ChainSupport, Self)> {
|
||||
// meta 的一些处理
|
||||
let meta_guard =
|
||||
Self::to_script("verge_meta_guard", include_str!("./builtin/meta_guard.js"));
|
||||
let meta_guard = Self::to_script("verge_meta_guard", include_str!("./builtin/meta_guard.js"));
|
||||
|
||||
// meta 1.13.2 alpn string 转 数组
|
||||
let hy_alpn = Self::to_script("verge_hy_alpn", include_str!("./builtin/meta_hy_alpn.js"));
|
||||
|
||||
// meta 的一些处理
|
||||
let meta_guard_alpha =
|
||||
Self::to_script("verge_meta_guard", include_str!("./builtin/meta_guard.js"));
|
||||
let meta_guard_alpha = Self::to_script("verge_meta_guard", include_str!("./builtin/meta_guard.js"));
|
||||
|
||||
// meta 1.13.2 alpn string 转 数组
|
||||
let hy_alpn_alpha =
|
||||
Self::to_script("verge_hy_alpn", include_str!("./builtin/meta_hy_alpn.js"));
|
||||
let hy_alpn_alpha = Self::to_script("verge_hy_alpn", include_str!("./builtin/meta_hy_alpn.js"));
|
||||
|
||||
vec![
|
||||
(ChainSupport::ClashMeta, hy_alpn),
|
||||
|
||||
@@ -17,13 +17,7 @@ pub const HANDLE_FIELDS: [&str; 12] = [
|
||||
"unified-delay",
|
||||
];
|
||||
|
||||
pub const DEFAULT_FIELDS: [&str; 5] = [
|
||||
"proxies",
|
||||
"proxy-providers",
|
||||
"proxy-groups",
|
||||
"rule-providers",
|
||||
"rules",
|
||||
];
|
||||
pub const DEFAULT_FIELDS: [&str; 5] = ["proxies", "proxy-providers", "proxy-groups", "rule-providers", "rules"];
|
||||
|
||||
pub fn use_lowercase(config: &Mapping) -> Mapping {
|
||||
let mut ret = Mapping::new();
|
||||
|
||||
@@ -455,11 +455,7 @@ async fn merge_default_config(
|
||||
config
|
||||
}
|
||||
|
||||
fn apply_builtin_scripts(
|
||||
mut config: Mapping,
|
||||
clash_core: Option<String>,
|
||||
enable_builtin: bool,
|
||||
) -> Mapping {
|
||||
fn apply_builtin_scripts(mut config: Mapping, clash_core: Option<String>, enable_builtin: bool) -> Mapping {
|
||||
if enable_builtin {
|
||||
ChainItem::builtin()
|
||||
.into_iter()
|
||||
@@ -552,9 +548,7 @@ fn cleanup_proxy_groups(mut config: Mapping) -> Mapping {
|
||||
|
||||
if let Some(Value::Sequence(proxies)) = group_map.get_mut("proxies") {
|
||||
proxies.retain(|proxy| match proxy {
|
||||
Value::String(name) => {
|
||||
allowed_names.contains(name.as_str()) || has_valid_provider
|
||||
}
|
||||
Value::String(name) => allowed_names.contains(name.as_str()) || has_valid_provider,
|
||||
_ => true,
|
||||
});
|
||||
}
|
||||
@@ -627,8 +621,7 @@ pub async fn enhance() -> (Mapping, HashSet<String>, HashMap<String, ResultLog>)
|
||||
let profile_name = profile.profile_name;
|
||||
|
||||
// process globals
|
||||
let (config, exists_keys, result_map) =
|
||||
process_global_items(config, global_merge, global_script, &profile_name);
|
||||
let (config, exists_keys, result_map) = process_global_items(config, global_merge, global_script, &profile_name);
|
||||
|
||||
// process profile-specific items
|
||||
let (config, exists_keys, result_map) = process_profile_items(
|
||||
@@ -710,9 +703,7 @@ proxy-groups:
|
||||
|
||||
let manual_group = groups
|
||||
.iter()
|
||||
.find(|group| {
|
||||
group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual")
|
||||
})
|
||||
.find(|group| group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual"))
|
||||
.and_then(|group| group.as_mapping())
|
||||
.expect("manual group should exist");
|
||||
|
||||
@@ -722,18 +713,12 @@ proxy-groups:
|
||||
.expect("manual proxies should be a sequence");
|
||||
|
||||
assert_eq!(manual_proxies.len(), 2);
|
||||
assert!(
|
||||
manual_proxies
|
||||
.iter()
|
||||
.any(|p| p.as_str() == Some("alive-node"))
|
||||
);
|
||||
assert!(manual_proxies.iter().any(|p| p.as_str() == Some("alive-node")));
|
||||
assert!(manual_proxies.iter().any(|p| p.as_str() == Some("DIRECT")));
|
||||
|
||||
let nested_group = groups
|
||||
.iter()
|
||||
.find(|group| {
|
||||
group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("nested")
|
||||
})
|
||||
.find(|group| group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("nested"))
|
||||
.and_then(|group| group.as_mapping())
|
||||
.expect("nested group should exist");
|
||||
|
||||
@@ -778,9 +763,7 @@ proxy-groups:
|
||||
|
||||
let manual_group = groups
|
||||
.iter()
|
||||
.find(|group| {
|
||||
group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual")
|
||||
})
|
||||
.find(|group| group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual"))
|
||||
.and_then(|group| group.as_mapping())
|
||||
.expect("manual group should exist");
|
||||
|
||||
@@ -825,9 +808,7 @@ proxy-groups:
|
||||
|
||||
let manual_group = groups
|
||||
.iter()
|
||||
.find(|group| {
|
||||
group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual")
|
||||
})
|
||||
.find(|group| group.get("name").and_then(serde_yaml_ng::Value::as_str) == Some("manual"))
|
||||
.and_then(|group| group.as_mapping())
|
||||
.expect("manual group should exist");
|
||||
|
||||
|
||||
@@ -12,11 +12,7 @@ const MAX_OUTPUT_SIZE: usize = 1024 * 1024; // 1MB
|
||||
const MAX_JSON_SIZE: usize = 10 * 1024 * 1024; // 10MB
|
||||
|
||||
// TODO 使用引用改进上下相关处理,避免不必要 Clone
|
||||
pub fn use_script(
|
||||
script: String,
|
||||
config: &Mapping,
|
||||
name: &String,
|
||||
) -> Result<(Mapping, Vec<(String, String)>)> {
|
||||
pub fn use_script(script: String, config: &Mapping, name: &String) -> Result<(Mapping, Vec<(String, String)>)> {
|
||||
let mut context = Context::default();
|
||||
|
||||
let outputs = Arc::new(Mutex::new(vec![]));
|
||||
@@ -26,50 +22,42 @@ pub fn use_script(
|
||||
let total_size_clone = Arc::clone(&total_size);
|
||||
|
||||
let _ = context.register_global_builtin_callable("__verge_log__".into(), 2, unsafe {
|
||||
NativeFunction::from_closure(
|
||||
move |_: &JsValue, args: &[JsValue], context: &mut Context| {
|
||||
let level = args.first().ok_or_else(|| {
|
||||
boa_engine::JsError::from_opaque(
|
||||
JsString::from("Missing level argument").into(),
|
||||
)
|
||||
})?;
|
||||
let level = level.to_string(context)?;
|
||||
let level = level.to_std_string().map_err(|_| {
|
||||
boa_engine::JsError::from_opaque(
|
||||
JsString::from("Failed to convert level to string").into(),
|
||||
)
|
||||
})?;
|
||||
NativeFunction::from_closure(move |_: &JsValue, args: &[JsValue], context: &mut Context| {
|
||||
let level = args
|
||||
.first()
|
||||
.ok_or_else(|| boa_engine::JsError::from_opaque(JsString::from("Missing level argument").into()))?;
|
||||
let level = level.to_string(context)?;
|
||||
let level = level.to_std_string().map_err(|_| {
|
||||
boa_engine::JsError::from_opaque(JsString::from("Failed to convert level to string").into())
|
||||
})?;
|
||||
|
||||
let data = args.get(1).ok_or_else(|| {
|
||||
boa_engine::JsError::from_opaque(JsString::from("Missing data argument").into())
|
||||
})?;
|
||||
let data = data.to_string(context)?;
|
||||
let data = data.to_std_string().map_err(|_| {
|
||||
boa_engine::JsError::from_opaque(
|
||||
JsString::from("Failed to convert data to string").into(),
|
||||
)
|
||||
})?;
|
||||
let data = args
|
||||
.get(1)
|
||||
.ok_or_else(|| boa_engine::JsError::from_opaque(JsString::from("Missing data argument").into()))?;
|
||||
let data = data.to_string(context)?;
|
||||
let data = data.to_std_string().map_err(|_| {
|
||||
boa_engine::JsError::from_opaque(JsString::from("Failed to convert data to string").into())
|
||||
})?;
|
||||
|
||||
// 检查输出限制
|
||||
if outputs_clone.lock().len() >= MAX_OUTPUTS {
|
||||
return Err(boa_engine::JsError::from_opaque(
|
||||
JsString::from("Maximum number of log outputs exceeded").into(),
|
||||
));
|
||||
}
|
||||
// 检查输出限制
|
||||
if outputs_clone.lock().len() >= MAX_OUTPUTS {
|
||||
return Err(boa_engine::JsError::from_opaque(
|
||||
JsString::from("Maximum number of log outputs exceeded").into(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut size = total_size_clone.lock();
|
||||
let new_size = *size + level.len() + data.len();
|
||||
if new_size > MAX_OUTPUT_SIZE {
|
||||
return Err(boa_engine::JsError::from_opaque(
|
||||
JsString::from("Maximum output size exceeded").into(),
|
||||
));
|
||||
}
|
||||
*size = new_size;
|
||||
drop(size);
|
||||
outputs_clone.lock().push((level.into(), data.into()));
|
||||
Ok(JsValue::undefined())
|
||||
},
|
||||
)
|
||||
let mut size = total_size_clone.lock();
|
||||
let new_size = *size + level.len() + data.len();
|
||||
if new_size > MAX_OUTPUT_SIZE {
|
||||
return Err(boa_engine::JsError::from_opaque(
|
||||
JsString::from("Maximum output size exceeded").into(),
|
||||
));
|
||||
}
|
||||
*size = new_size;
|
||||
drop(size);
|
||||
outputs_clone.lock().push((level.into(), data.into()));
|
||||
Ok(JsValue::undefined())
|
||||
})
|
||||
});
|
||||
|
||||
let _ = context.eval(Source::from_bytes(
|
||||
@@ -127,12 +115,7 @@ pub fn use_script(
|
||||
outputs
|
||||
.lock()
|
||||
.push(("exception".into(), "Script execution failed".into()));
|
||||
logging_error!(
|
||||
Type::Config,
|
||||
"Script execution error: {}. Script name: {}",
|
||||
err,
|
||||
name
|
||||
);
|
||||
logging_error!(Type::Config, "Script execution error: {}. Script name: {}", err, name);
|
||||
Ok((config, outputs.lock().to_vec()))
|
||||
}
|
||||
}
|
||||
@@ -205,8 +188,8 @@ fn test_script() {
|
||||
";
|
||||
|
||||
let config = &serde_yaml_ng::from_str(config).expect("Failed to parse test config YAML");
|
||||
let (config, results) = use_script(script.into(), config, &String::from(""))
|
||||
.expect("Script execution should succeed in test");
|
||||
let (config, results) =
|
||||
use_script(script.into(), config, &String::from("")).expect("Script execution should succeed in test");
|
||||
|
||||
let _ = serde_yaml_ng::to_string(&config).expect("Failed to serialize config to YAML");
|
||||
let yaml_config_size = std::mem::size_of_val(&config);
|
||||
@@ -230,8 +213,7 @@ fn test_escape_unescape() {
|
||||
assert!(parsed.contains_key("nested"));
|
||||
|
||||
let quoted_json_str = r#""{"key":"value","nested":{"key":"value"}}""#;
|
||||
let parsed_quoted =
|
||||
parse_json_safely(quoted_json_str).expect("Failed to parse quoted test JSON safely");
|
||||
let parsed_quoted = parse_json_safely(quoted_json_str).expect("Failed to parse quoted test JSON safely");
|
||||
|
||||
assert!(parsed_quoted.contains_key("key"));
|
||||
assert!(parsed_quoted.contains_key("nested"));
|
||||
|
||||
@@ -62,20 +62,14 @@ pub fn use_seq(seq: SeqMap, mut config: Mapping, field: &str) -> Mapping {
|
||||
})
|
||||
.cloned()
|
||||
.collect();
|
||||
group_map.insert(
|
||||
Value::String("proxies".into()),
|
||||
Value::Sequence(filtered_proxies),
|
||||
);
|
||||
group_map.insert(Value::String("proxies".into()), Value::Sequence(filtered_proxies));
|
||||
}
|
||||
new_groups.push(Value::Mapping(group_map.to_owned()));
|
||||
} else {
|
||||
new_groups.push(group.to_owned());
|
||||
}
|
||||
}
|
||||
config.insert(
|
||||
Value::String("proxy-groups".into()),
|
||||
Value::Sequence(new_groups),
|
||||
);
|
||||
config.insert(Value::String("proxy-groups".into()), Value::Sequence(new_groups));
|
||||
}
|
||||
|
||||
config
|
||||
@@ -108,8 +102,7 @@ proxy-groups:
|
||||
proxies:
|
||||
- "proxy1"
|
||||
"#;
|
||||
let mut config: Mapping =
|
||||
serde_yaml_ng::from_str(config_str).expect("Failed to parse test config YAML");
|
||||
let mut config: Mapping = serde_yaml_ng::from_str(config_str).expect("Failed to parse test config YAML");
|
||||
|
||||
let seq = SeqMap {
|
||||
prepend: Sequence::new(),
|
||||
@@ -160,9 +153,7 @@ proxy-groups:
|
||||
|
||||
assert_eq!(group1_proxies.len(), 1);
|
||||
assert_eq!(
|
||||
group1_proxies[0]
|
||||
.as_str()
|
||||
.expect("proxy name should be string"),
|
||||
group1_proxies[0].as_str().expect("proxy name should be string"),
|
||||
"proxy2"
|
||||
);
|
||||
assert_eq!(group2_proxies.len(), 0);
|
||||
|
||||
@@ -36,10 +36,7 @@ pub fn use_tun(mut config: Mapping, enable: bool) -> Mapping {
|
||||
val.as_mapping().cloned().unwrap_or_else(Mapping::new)
|
||||
});
|
||||
let ipv6_key = Value::from("ipv6");
|
||||
let ipv6_val = config
|
||||
.get(&ipv6_key)
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
let ipv6_val = config.get(&ipv6_key).and_then(|v| v.as_bool()).unwrap_or(false);
|
||||
|
||||
// 检查现有的 enhanced-mode 设置
|
||||
let current_mode = dns_val
|
||||
|
||||
@@ -47,11 +47,7 @@ async fn finalize_restored_verge_config(
|
||||
// Ensure side-effects (flags, tray, sysproxy, hotkeys, auto-backup refresh, etc.) run.
|
||||
// Use not_save_file = true to avoid extra I/O (we already persisted the restored file).
|
||||
if let Err(err) = super::patch_verge(&restored, true).await {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to apply restored verge config: {err:#?}"
|
||||
);
|
||||
logging!(error, Type::Backup, "Failed to apply restored verge config: {err:#?}");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -83,28 +79,17 @@ pub async fn create_backup_and_upload_webdav() -> Result<()> {
|
||||
/// List WebDAV backups
|
||||
pub async fn list_wevdav_backup() -> Result<Vec<ListFile>> {
|
||||
backup::WebDavClient::global().list().await.map_err(|err| {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to list WebDAV backup files: {err:#?}"
|
||||
);
|
||||
logging!(error, Type::Backup, "Failed to list WebDAV backup files: {err:#?}");
|
||||
err
|
||||
})
|
||||
}
|
||||
|
||||
/// Delete WebDAV backup
|
||||
pub async fn delete_webdav_backup(filename: String) -> Result<()> {
|
||||
backup::WebDavClient::global()
|
||||
.delete(filename)
|
||||
.await
|
||||
.map_err(|err| {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to delete WebDAV backup file: {err:#?}"
|
||||
);
|
||||
err
|
||||
})
|
||||
backup::WebDavClient::global().delete(filename).await.map_err(|err| {
|
||||
logging!(error, Type::Backup, "Failed to delete WebDAV backup file: {err:#?}");
|
||||
err
|
||||
})
|
||||
}
|
||||
|
||||
/// Restore WebDAV backup
|
||||
@@ -122,11 +107,7 @@ pub async fn restore_webdav_backup(filename: String) -> Result<()> {
|
||||
.download(filename, backup_storage_path.clone())
|
||||
.await
|
||||
.map_err(|err| {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to download WebDAV backup file: {err:#?}"
|
||||
);
|
||||
logging!(error, Type::Backup, "Failed to download WebDAV backup file: {err:#?}");
|
||||
err
|
||||
})?;
|
||||
|
||||
@@ -153,11 +134,7 @@ where
|
||||
F: FnOnce(&str) -> String,
|
||||
{
|
||||
let (file_name, temp_file_path) = backup::create_backup().await.map_err(|err| {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to create local backup: {err:#?}"
|
||||
);
|
||||
logging!(error, Type::Backup, "Failed to create local backup: {err:#?}");
|
||||
err
|
||||
})?;
|
||||
|
||||
@@ -166,11 +143,7 @@ where
|
||||
let target_path = backup_dir.join(final_name.as_str());
|
||||
|
||||
if let Err(err) = move_file(temp_file_path.clone(), target_path.clone()).await {
|
||||
logging!(
|
||||
error,
|
||||
Type::Backup,
|
||||
"Failed to move local backup file: {err:#?}"
|
||||
);
|
||||
logging!(error, Type::Backup, "Failed to move local backup file: {err:#?}");
|
||||
// 清理临时文件
|
||||
if let Err(clean_err) = temp_file_path.remove_if_exists().await {
|
||||
logging!(
|
||||
@@ -251,12 +224,7 @@ pub async fn delete_local_backup(filename: String) -> Result<()> {
|
||||
let backup_dir = local_backup_dir()?;
|
||||
let target_path = backup_dir.join(filename.as_str());
|
||||
if !target_path.exists() {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Backup,
|
||||
"Local backup file not found: {}",
|
||||
filename
|
||||
);
|
||||
logging!(warn, Type::Backup, "Local backup file not found: {}", filename);
|
||||
return Ok(());
|
||||
}
|
||||
target_path.remove_if_exists().await?;
|
||||
|
||||
@@ -75,16 +75,10 @@ pub async fn change_clash_mode(mode: String) {
|
||||
"mode": mode
|
||||
});
|
||||
logging!(debug, Type::Core, "change clash mode to {mode}");
|
||||
match handle::Handle::mihomo()
|
||||
.await
|
||||
.patch_base_config(&json_value)
|
||||
.await
|
||||
{
|
||||
match handle::Handle::mihomo().await.patch_base_config(&json_value).await {
|
||||
Ok(_) => {
|
||||
// 更新订阅
|
||||
Config::clash()
|
||||
.await
|
||||
.edit_draft(|d| d.patch_config(&mapping));
|
||||
Config::clash().await.edit_draft(|d| d.patch_config(&mapping));
|
||||
|
||||
// 分离数据获取和异步调用
|
||||
let clash_data = Config::clash().await.data_arc();
|
||||
@@ -99,11 +93,7 @@ pub async fn change_clash_mode(mode: String) {
|
||||
);
|
||||
}
|
||||
|
||||
let is_auto_close_connection = Config::verge()
|
||||
.await
|
||||
.data_arc()
|
||||
.auto_close_connection
|
||||
.unwrap_or(false);
|
||||
let is_auto_close_connection = Config::verge().await.data_arc().auto_close_connection.unwrap_or(false);
|
||||
if is_auto_close_connection {
|
||||
after_change_clash_mode();
|
||||
}
|
||||
@@ -117,11 +107,7 @@ pub async fn test_delay(url: String) -> anyhow::Result<u32> {
|
||||
use crate::utils::network::{NetworkManager, ProxyType};
|
||||
use tokio::time::Instant;
|
||||
|
||||
let tun_mode = Config::verge()
|
||||
.await
|
||||
.latest_arc()
|
||||
.enable_tun_mode
|
||||
.unwrap_or(false);
|
||||
let tun_mode = Config::verge().await.latest_arc().enable_tun_mode.unwrap_or(false);
|
||||
|
||||
// 如果是TUN模式,不使用代理,否则使用自身代理
|
||||
let proxy_type = if !tun_mode {
|
||||
|
||||
@@ -27,9 +27,7 @@ pub async fn patch_clash(patch: &Mapping) -> Result<()> {
|
||||
.await
|
||||
);
|
||||
}
|
||||
Config::runtime()
|
||||
.await
|
||||
.edit_draft(|d| d.patch_config(patch));
|
||||
Config::runtime().await.edit_draft(|d| d.patch_config(patch));
|
||||
CoreManager::global().update_config().await?;
|
||||
}
|
||||
handle::Handle::refresh_clash();
|
||||
@@ -257,10 +255,7 @@ pub async fn patch_verge(patch: &IVerge, not_save_file: bool) -> Result<()> {
|
||||
return Err(err);
|
||||
}
|
||||
Config::verge().await.apply();
|
||||
logging_error!(
|
||||
Type::Backup,
|
||||
AutoBackupManager::global().refresh_settings().await
|
||||
);
|
||||
logging_error!(Type::Backup, AutoBackupManager::global().refresh_settings().await);
|
||||
if !not_save_file {
|
||||
// 分离数据获取和异步调用
|
||||
let verge_data = Config::verge().await.data_arc();
|
||||
|
||||
@@ -23,13 +23,7 @@ pub async fn switch_proxy_node(group_name: &str, proxy_name: &str) {
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
logging!(
|
||||
info,
|
||||
Type::Tray,
|
||||
"切换代理成功: {} -> {}",
|
||||
group_name,
|
||||
proxy_name
|
||||
);
|
||||
logging!(info, Type::Tray, "切换代理成功: {} -> {}", group_name, proxy_name);
|
||||
let _ = handle::Handle::app_handle().emit("verge://refresh-proxy-config", ());
|
||||
let _ = tray::Tray::global().update_menu().await;
|
||||
return;
|
||||
@@ -52,13 +46,7 @@ pub async fn switch_proxy_node(group_name: &str, proxy_name: &str) {
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
logging!(
|
||||
info,
|
||||
Type::Tray,
|
||||
"代理切换回退成功: {} -> {}",
|
||||
group_name,
|
||||
proxy_name
|
||||
);
|
||||
logging!(info, Type::Tray, "代理切换回退成功: {} -> {}", group_name, proxy_name);
|
||||
let _ = tray::Tray::global().update_menu().await;
|
||||
}
|
||||
Err(err) => {
|
||||
@@ -74,42 +62,20 @@ pub async fn switch_proxy_node(group_name: &str, proxy_name: &str) {
|
||||
}
|
||||
}
|
||||
|
||||
async fn should_update_profile(
|
||||
uid: &String,
|
||||
ignore_auto_update: bool,
|
||||
) -> Result<Option<(String, Option<PrfOption>)>> {
|
||||
async fn should_update_profile(uid: &String, ignore_auto_update: bool) -> Result<Option<(String, Option<PrfOption>)>> {
|
||||
let profiles = Config::profiles().await;
|
||||
let profiles = profiles.latest_arc();
|
||||
let item = profiles.get_item(uid)?;
|
||||
let is_remote = item.itype.as_ref().is_some_and(|s| s == "remote");
|
||||
|
||||
if !is_remote {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[订阅更新] {uid} 不是远程订阅,跳过更新"
|
||||
);
|
||||
logging!(info, Type::Config, "[订阅更新] {uid} 不是远程订阅,跳过更新");
|
||||
Ok(None)
|
||||
} else if item.url.is_none() {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
"Warning: [订阅更新] {uid} 缺少URL,无法更新"
|
||||
);
|
||||
logging!(warn, Type::Config, "Warning: [订阅更新] {uid} 缺少URL,无法更新");
|
||||
bail!("failed to get the profile item url");
|
||||
} else if !ignore_auto_update
|
||||
&& !item
|
||||
.option
|
||||
.as_ref()
|
||||
.and_then(|o| o.allow_auto_update)
|
||||
.unwrap_or(true)
|
||||
{
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[订阅更新] {} 禁止自动更新,跳过更新",
|
||||
uid
|
||||
);
|
||||
} else if !ignore_auto_update && !item.option.as_ref().and_then(|o| o.allow_auto_update).unwrap_or(true) {
|
||||
logging!(info, Type::Config, "[订阅更新] {} 禁止自动更新,跳过更新", uid);
|
||||
Ok(None)
|
||||
} else {
|
||||
logging!(
|
||||
@@ -122,9 +88,7 @@ async fn should_update_profile(
|
||||
.ok_or_else(|| anyhow::anyhow!("Profile URL is None"))?
|
||||
);
|
||||
Ok(Some((
|
||||
item.url
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow::anyhow!("Profile URL is None"))?,
|
||||
item.url.clone().ok_or_else(|| anyhow::anyhow!("Profile URL is None"))?,
|
||||
item.option.clone(),
|
||||
)))
|
||||
}
|
||||
@@ -172,11 +136,7 @@ async fn perform_profile_update(
|
||||
|
||||
match PrfItem::from_url(url, None, None, merged_opt.as_ref()).await {
|
||||
Ok(mut item) => {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[订阅更新] 使用 Clash代理 更新订阅配置成功"
|
||||
);
|
||||
logging!(info, Type::Config, "[订阅更新] 使用 Clash代理 更新订阅配置成功");
|
||||
profiles_draft_update_item_safe(uid, &mut item).await?;
|
||||
handle::Handle::notice_message("update_with_clash_proxy", profile_name);
|
||||
drop(last_err);
|
||||
@@ -197,11 +157,7 @@ async fn perform_profile_update(
|
||||
|
||||
match PrfItem::from_url(url, None, None, merged_opt.as_ref()).await {
|
||||
Ok(mut item) => {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[订阅更新] 使用 系统代理 更新订阅配置成功"
|
||||
);
|
||||
logging!(info, Type::Config, "[订阅更新] 使用 系统代理 更新订阅配置成功");
|
||||
profiles_draft_update_item_safe(uid, &mut item).await?;
|
||||
handle::Handle::notice_message("update_with_clash_proxy", profile_name);
|
||||
drop(last_err);
|
||||
@@ -217,10 +173,7 @@ async fn perform_profile_update(
|
||||
}
|
||||
}
|
||||
|
||||
handle::Handle::notice_message(
|
||||
"update_failed_even_with_clash",
|
||||
format!("{profile_name} - {last_err}"),
|
||||
);
|
||||
handle::Handle::notice_message("update_failed_even_with_clash", format!("{profile_name} - {last_err}"));
|
||||
Ok(is_current)
|
||||
}
|
||||
|
||||
@@ -234,9 +187,7 @@ pub async fn update_profile(
|
||||
let url_opt = should_update_profile(uid, ignore_auto_update).await?;
|
||||
|
||||
let should_refresh = match url_opt {
|
||||
Some((url, opt)) => {
|
||||
perform_profile_update(uid, &url, opt.as_ref(), option).await? && auto_refresh
|
||||
}
|
||||
Some((url, opt)) => perform_profile_update(uid, &url, opt.as_ref(), option).await? && auto_refresh,
|
||||
None => auto_refresh,
|
||||
};
|
||||
|
||||
|
||||
@@ -17,11 +17,7 @@ pub async fn toggle_system_proxy() {
&& auto_close_connection
&& let Err(err) = handle::Handle::mihomo().await.close_all_connections().await
{
logging!(
error,
Type::ProxyMode,
"Failed to close all connections: {err}"
);
logging!(error, Type::ProxyMode, "Failed to close all connections: {err}");
}

let patch_result = super::patch_verge(
@@ -86,9 +82,7 @@ pub async fn copy_clash_env() {
let env_type = verge_cfg.env_type.as_deref().unwrap_or(default_env);

let export_text = match env_type {
"bash" => format!(
"export https_proxy={http_proxy} http_proxy={http_proxy} all_proxy={socks5_proxy}"
),
"bash" => format!("export https_proxy={http_proxy} http_proxy={http_proxy} all_proxy={socks5_proxy}"),
"cmd" => format!("set http_proxy={http_proxy}\r\nset https_proxy={http_proxy}"),
"powershell" => {
format!("$env:HTTP_PROXY=\"{http_proxy}\"; $env:HTTPS_PROXY=\"{http_proxy}\"")
@@ -98,11 +92,7 @@ pub async fn copy_clash_env() {
}
"fish" => format!("set -x http_proxy {http_proxy}; set -x https_proxy {http_proxy}"),
_ => {
logging!(
error,
Type::ProxyMode,
"copy_clash_env: Invalid env type! {env_type}"
);
logging!(error, Type::ProxyMode, "copy_clash_env: Invalid env type! {env_type}");
return;
}
};

@@ -51,11 +51,7 @@ pub async fn clean_async() -> bool {
use winapi::um::winuser::{GetSystemMetrics, SM_SHUTTINGDOWN};

// 检查系统代理是否开启
let sys_proxy_enabled = Config::verge()
.await
.data_arc()
.enable_system_proxy
.unwrap_or(false);
let sys_proxy_enabled = Config::verge().await.data_arc().enable_system_proxy.unwrap_or(false);

if !sys_proxy_enabled {
logging!(info, Type::Window, "系统代理未启用,跳过重置");
@@ -67,11 +63,7 @@ pub async fn clean_async() -> bool {

if is_shutting_down {
// sysproxy-rs 操作注册表(避免.exe的dll错误)
logging!(
info,
Type::Window,
"检测到正在关机,syspro-rs操作注册表关闭系统代理"
);
logging!(info, Type::Window, "检测到正在关机,syspro-rs操作注册表关闭系统代理");

match Sysproxy::get_system_proxy() {
Ok(mut sysproxy) => {
@@ -99,12 +91,7 @@ pub async fn clean_async() -> bool {
// 正常退出:使用 sysproxy.exe 重置代理
logging!(info, Type::Window, "sysproxy.exe重置系统代理");

match timeout(
Duration::from_secs(2),
sysopt::Sysopt::global().reset_sysproxy(),
)
.await
{
match timeout(Duration::from_secs(2), sysopt::Sysopt::global().reset_sysproxy()).await {
Ok(Ok(_)) => {
logging!(info, Type::Window, "系统代理已重置");
true
@@ -114,11 +101,7 @@ pub async fn clean_async() -> bool {
true
}
Err(_) => {
logging!(
warn,
Type::Window,
"Warning: 重置系统代理超时,继续退出流程"
);
logging!(warn, Type::Window, "Warning: 重置系统代理超时,继续退出流程");
true
}
}
@@ -127,11 +110,7 @@ pub async fn clean_async() -> bool {
// 非 Windows 平台:正常重置代理
#[cfg(not(target_os = "windows"))]
{
let sys_proxy_enabled = Config::verge()
.await
.data_arc()
.enable_system_proxy
.unwrap_or(false);
let sys_proxy_enabled = Config::verge().await.data_arc().enable_system_proxy.unwrap_or(false);

if !sys_proxy_enabled {
logging!(info, Type::Window, "系统代理未启用,跳过重置");
@@ -140,12 +119,7 @@ pub async fn clean_async() -> bool {

logging!(info, Type::Window, "开始重置系统代理...");

match timeout(
Duration::from_millis(1500),
sysopt::Sysopt::global().reset_sysproxy(),
)
.await
{
match timeout(Duration::from_millis(1500), sysopt::Sysopt::global().reset_sysproxy()).await {
Ok(Ok(_)) => {
logging!(info, Type::Window, "系统代理已重置");
true
@@ -165,20 +139,14 @@ pub async fn clean_async() -> bool {
// 关闭 Tun 模式 + 停止核心服务
let core_task = tokio::task::spawn(async {
logging!(info, Type::System, "disable tun");
let tun_enabled = Config::verge()
.await
.data_arc()
.enable_tun_mode
.unwrap_or(false);
let tun_enabled = Config::verge().await.data_arc().enable_tun_mode.unwrap_or(false);
if tun_enabled {
let disable_tun = serde_json::json!({ "tun": { "enable": false } });

logging!(info, Type::System, "send disable tun request to mihomo");
match timeout(
Duration::from_millis(1000),
handle::Handle::mihomo()
.await
.patch_base_config(&disable_tun),
handle::Handle::mihomo().await.patch_base_config(&disable_tun),
)
.await
{

@@ -134,8 +134,7 @@ mod app_init {
Ok(())
}

pub fn generate_handlers()
-> impl Fn(tauri::ipc::Invoke<tauri::Wry>) -> bool + Send + Sync + 'static {
pub fn generate_handlers() -> impl Fn(tauri::ipc::Invoke<tauri::Wry>) -> bool + Send + Sync + 'static {
tauri::generate_handler![
tauri_plugin_clash_verge_sysinfo::commands::get_system_info,
tauri_plugin_clash_verge_sysinfo::commands::get_app_uptime,
@@ -330,11 +329,7 @@ pub fn run() {

pub fn handle_window_focus(focused: bool) {
AsyncHandler::spawn(move || async move {
let is_enable_global_hotkey = Config::verge()
.await
.data_arc()
.enable_global_hotkey
.unwrap_or(true);
let is_enable_global_hotkey = Config::verge().await.data_arc().enable_global_hotkey.unwrap_or(true);

if focused {
#[cfg(target_os = "macos")]
@@ -370,11 +365,7 @@ pub fn run() {
AsyncHandler::spawn(move || async move {
let _ = hotkey::Hotkey::global().unregister_system_hotkey(SystemHotkey::CmdQ);
let _ = hotkey::Hotkey::global().unregister_system_hotkey(SystemHotkey::CmdW);
let is_enable_global_hotkey = Config::verge()
.await
.data_arc()
.enable_global_hotkey
.unwrap_or(true);
let is_enable_global_hotkey = Config::verge().await.data_arc().enable_global_hotkey.unwrap_or(true);
if !is_enable_global_hotkey {
let _ = hotkey::Hotkey::global().reset();
}
@@ -386,27 +377,15 @@ pub fn run() {
let context = tauri::test::mock_context(tauri::test::noop_assets());
#[cfg(feature = "clippy")]
let app = builder.build(context).unwrap_or_else(|e| {
logging!(
error,
Type::Setup,
"Failed to build Tauri application: {}",
e
);
logging!(error, Type::Setup, "Failed to build Tauri application: {}", e);
std::process::exit(1);
});

#[cfg(not(feature = "clippy"))]
let app = builder
.build(tauri::generate_context!())
.unwrap_or_else(|e| {
logging!(
error,
Type::Setup,
"Failed to build Tauri application: {}",
e
);
std::process::exit(1);
});
let app = builder.build(tauri::generate_context!()).unwrap_or_else(|e| {
logging!(error, Type::Setup, "Failed to build Tauri application: {}", e);
std::process::exit(1);
});

app.run(|app_handle, e| match e {
tauri::RunEvent::Ready | tauri::RunEvent::Resumed => {
@@ -417,8 +396,7 @@ pub fn run() {
}
#[cfg(target_os = "macos")]
tauri::RunEvent::Reopen {
has_visible_windows,
..
has_visible_windows, ..
} => {
if core::handle::Handle::global().is_exiting() {
return;
@@ -435,10 +413,7 @@ pub fn run() {
}),
tauri::RunEvent::ExitRequested { api, code, .. } => {
AsyncHandler::block_on(async {
let _ = handle::Handle::mihomo()
.await
.clear_all_ws_connections()
.await;
let _ = handle::Handle::mihomo().await.clear_all_ws_connections().await;
});

if core::handle::Handle::global().is_exiting() {

@@ -214,27 +214,14 @@ impl AutoBackupManager {
return Ok(());
}

let file_name =
create_local_backup_with_namer(|name| append_auto_suffix(name, trigger.slug()).into())
.await?;
self.last_backup
.store(Local::now().timestamp(), Ordering::Release);
let file_name = create_local_backup_with_namer(|name| append_auto_suffix(name, trigger.slug()).into()).await?;
self.last_backup.store(Local::now().timestamp(), Ordering::Release);

if let Err(err) = cleanup_auto_backups().await {
logging!(
warn,
Type::Backup,
"Failed to cleanup old auto backups: {err:#?}"
);
logging!(warn, Type::Backup, "Failed to cleanup old auto backups: {err:#?}");
}

logging!(
info,
Type::Backup,
"Auto backup created ({:?}): {}",
trigger,
file_name
);
logging!(info, Type::Backup, "Auto backup created ({:?}): {}", trigger, file_name);
Ok(())
}

@@ -273,11 +260,7 @@ async fn cleanup_auto_backups() -> Result<()> {
let mut entries = match fs::read_dir(&backup_dir).await {
Ok(dir) => dir,
Err(err) => {
logging!(
warn,
Type::Backup,
"Failed to read backup directory: {err:#?}"
);
logging!(warn, Type::Backup, "Failed to read backup directory: {err:#?}");
return Ok(());
}
};

@@ -51,12 +51,7 @@ fn get_state() -> LightweightState {
#[inline]
fn try_transition(from: LightweightState, to: LightweightState) -> bool {
LIGHTWEIGHT_STATE
.compare_exchange(
from.as_u8(),
to.as_u8(),
Ordering::AcqRel,
Ordering::Relaxed,
)
.compare_exchange(from.as_u8(), to.as_u8(), Ordering::AcqRel, Ordering::Relaxed)
.is_ok()
}

@@ -83,10 +78,7 @@ async fn refresh_lightweight_tray_state() {

pub async fn auto_lightweight_boot() -> Result<()> {
let verge_config = Config::verge().await;
let is_enable_auto = verge_config
.data_arc()
.enable_auto_light_weight_mode
.unwrap_or(false);
let is_enable_auto = verge_config.data_arc().enable_auto_light_weight_mode.unwrap_or(false);
let is_silent_start = verge_config.data_arc().enable_silent_start.unwrap_or(false);
if is_enable_auto {
enable_auto_light_weight_mode().await;
@@ -182,11 +174,7 @@ fn setup_webview_focus_listener() {
if let Some(window) = handle::Handle::get_window() {
let handler_id = window.listen("tauri://focus", move |_event| {
logging_error!(Type::Lightweight, cancel_light_weight_timer());
logging!(
debug,
Type::Lightweight,
"监听到窗口获得焦点,取消轻量模式计时"
);
logging!(debug, Type::Lightweight, "监听到窗口获得焦点,取消轻量模式计时");
});
WEBVIEW_FOCUS_HANDLER_ID.store(handler_id, Ordering::Release);
}
@@ -207,11 +195,7 @@ async fn setup_light_weight_timer() -> Result<()> {
return Err(e).context("failed to initialize timer");
}

let once_by_minutes = Config::verge()
.await
.data_arc()
.auto_light_weight_minutes
.unwrap_or(10);
let once_by_minutes = Config::verge().await.data_arc().auto_light_weight_minutes.unwrap_or(10);

{
let timer_map = Timer::global().timer_map.read();
@@ -239,9 +223,7 @@ async fn setup_light_weight_timer() -> Result<()> {

{
let delay_timer = Timer::global().delay_timer.write();
delay_timer
.add_task(task)
.context("failed to add timer task")?;
delay_timer.add_task(task).context("failed to add timer task")?;
}

{
@@ -265,10 +247,7 @@ async fn setup_light_weight_timer() -> Result<()> {
}

fn cancel_light_weight_timer() -> Result<()> {
let value = Timer::global()
.timer_map
.write()
.remove(LIGHT_WEIGHT_TASK_UID);
let value = Timer::global().timer_map.write().remove(LIGHT_WEIGHT_TASK_UID);
if let Some(task) = value {
Timer::global()
.delay_timer

@@ -62,12 +62,7 @@ impl AsyncHandler {
let location = Location::caller();
let type_str = type_name::<F>();
let size_str = format!("{} bytes", size);
let loc_str = format!(
"{}:{}:{}",
location.file(),
location.line(),
location.column()
);
let loc_str = format!("{}:{}:{}", location.file(), location.line(), location.column());

println!(
"┌────────────────────┬─────────────────────────────────────────────────────────────────────────────┐"

@@ -28,8 +28,7 @@ pub fn get_startup_dir() -> Result<PathBuf> {
/// 获取当前可执行文件路径
#[cfg(target_os = "windows")]
pub fn get_exe_path() -> Result<PathBuf> {
let exe_path =
std::env::current_exe().map_err(|e| anyhow!("无法获取当前可执行文件路径: {}", e))?;
let exe_path = std::env::current_exe().map_err(|e| anyhow!("无法获取当前可执行文件路径: {}", e))?;

Ok(exe_path)
}

@@ -60,11 +60,7 @@ pub fn app_home_dir() -> Result<PathBuf> {
match app_handle.path().data_dir() {
Ok(dir) => Ok(dir.join(APP_ID)),
Err(e) => {
logging!(
error,
Type::File,
"Failed to get the app home directory: {e}"
);
logging!(error, Type::File, "Failed to get the app home directory: {e}");
Err(anyhow::anyhow!("Failed to get the app homedirectory"))
}
}
@@ -78,11 +74,7 @@ pub fn app_resources_dir() -> Result<PathBuf> {
match app_handle.path().resource_dir() {
Ok(dir) => Ok(dir.join("resources")),
Err(e) => {
logging!(
error,
Type::File,
"Failed to get the resource directory: {e}"
);
logging!(error, Type::File, "Failed to get the resource directory: {e}");
Err(anyhow::anyhow!("Failed to get the resource directory"))
}
}
@@ -110,15 +102,11 @@ pub fn find_target_icons(target: &str) -> Result<Option<String>> {
let ext_matches = path
.extension()
.and_then(|e| e.to_str())
.is_some_and(|ext| {
ext.eq_ignore_ascii_case("ico") || ext.eq_ignore_ascii_case("png")
});
.is_some_and(|ext| ext.eq_ignore_ascii_case("ico") || ext.eq_ignore_ascii_case("png"));
prefix_matches && ext_matches
});

icon_path
.map(|path| path_to_str(&path).map(|s| s.into()))
.transpose()
icon_path.map(|path| path_to_str(&path).map(|s| s.into())).transpose()
}

/// logs dir
@@ -179,9 +167,7 @@ pub fn service_log_dir() -> Result<PathBuf> {
pub fn clash_latest_log() -> Result<PathBuf> {
match *CoreManager::global().get_running_mode() {
RunningMode::Service => Ok(service_log_dir()?.join("service_latest.log")),
RunningMode::Sidecar | RunningMode::NotRunning => {
Ok(sidecar_log_dir()?.join("sidecar_latest.log"))
}
RunningMode::Sidecar | RunningMode::NotRunning => Ok(sidecar_log_dir()?.join("sidecar_latest.log")),
}
}

@@ -207,12 +193,10 @@ pub fn get_encryption_key() -> Result<Vec<u8>> {

// Ensure directory exists
if let Some(parent) = key_path.parent() {
fs::create_dir_all(parent)
.map_err(|e| anyhow::anyhow!("Failed to create key directory: {}", e))?;
fs::create_dir_all(parent).map_err(|e| anyhow::anyhow!("Failed to create key directory: {}", e))?;
}
// Save key
fs::write(&key_path, &key)
.map_err(|e| anyhow::anyhow!("Failed to save encryption key: {}", e))?;
fs::write(&key_path, &key).map_err(|e| anyhow::anyhow!("Failed to save encryption key: {}", e))?;
Ok(key)
}
}
@@ -228,11 +212,7 @@ pub fn ensure_mihomo_safe_dir() -> Option<PathBuf> {
if home_config.exists() || fs::create_dir_all(&home_config).is_ok() {
Some(home_config)
} else {
logging!(
error,
Type::File,
"Failed to create safe directory: {home_config:?}"
);
logging!(error, Type::File, "Failed to create safe directory: {home_config:?}");
None
}
})

@@ -35,19 +35,14 @@ pub async fn read_mapping(path: &PathBuf) -> Result<Mapping> {

Ok(val
.as_mapping()
.ok_or_else(|| {
anyhow!("failed to transform to yaml mapping \"{}\"", path.display())
})?
.ok_or_else(|| anyhow!("failed to transform to yaml mapping \"{}\"", path.display()))?
.to_owned())
}
Err(err) => {
let error_msg = format!("YAML syntax error in {}: {}", path.display(), err);
logging!(error, Type::Config, "{}", error_msg);

crate::core::handle::Handle::notice_message(
"config_validate::yaml_syntax_error",
&error_msg,
);
crate::core::handle::Handle::notice_message("config_validate::yaml_syntax_error", &error_msg);

bail!("YAML syntax error: {}", err)
}
@@ -61,11 +56,7 @@ pub async fn read_seq_map(path: &PathBuf) -> Result<SeqMap> {

/// save the data to the file
/// can set `prefix` string to add some comments
pub async fn save_yaml<T: Serialize + Sync>(
path: &PathBuf,
data: &T,
prefix: Option<&str>,
) -> Result<()> {
pub async fn save_yaml<T: Serialize + Sync>(path: &PathBuf, data: &T, prefix: Option<&str>) -> Result<()> {
let data_str = with_encryption(|| async { serde_yaml_ng::to_string(data) }).await?;

let yaml_str = match prefix {
@@ -82,10 +73,9 @@ pub async fn save_yaml<T: Serialize + Sync>(
}

const ALPHABET: [char; 62] = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',
'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B',
'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
'V', 'W', 'X', 'Y', 'Z',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J',
'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
];

/// generate the uid

@@ -50,11 +50,9 @@ fn resolve_supported_language(language: &str) -> Option<String> {

let supported = supported_languages_internal();

candidates.into_iter().find(|candidate| {
supported
.iter()
.any(|&lang| lang.eq_ignore_ascii_case(candidate))
})
candidates
.into_iter()
.find(|candidate| supported.iter().any(|&lang| lang.eq_ignore_ascii_case(candidate)))
}

fn system_language() -> String {
@@ -72,8 +70,7 @@ pub fn get_supported_languages() -> Vec<String> {
}

pub fn set_locale(language: &str) {
let lang =
resolve_supported_language(language).unwrap_or_else(|| fallback_language().to_string());
let lang = resolve_supported_language(language).unwrap_or_else(|| fallback_language().to_string());
rust_i18n::set_locale(&lang);
}

@@ -223,14 +223,8 @@ async fn init_dns_config() -> Result<()> {
("enable".into(), Value::Bool(true)),
("listen".into(), Value::String(":53".into())),
("enhanced-mode".into(), Value::String("fake-ip".into())),
(
"fake-ip-range".into(),
Value::String("198.18.0.1/16".into()),
),
(
"fake-ip-filter-mode".into(),
Value::String("blacklist".into()),
),
("fake-ip-range".into(), Value::String("198.18.0.1/16".into())),
("fake-ip-filter-mode".into(), Value::String("blacklist".into())),
("prefer-h3".into(), Value::Bool(false)),
("respect-rules".into(), Value::Bool(false)),
("use-hosts".into(), Value::Bool(false)),
@@ -310,10 +304,7 @@ async fn init_dns_config() -> Result<()> {
// 获取默认DNS和host配置
let default_dns_config = serde_yaml_ng::Mapping::from_iter([
("dns".into(), Value::Mapping(dns_config)),
(
"hosts".into(),
Value::Mapping(serde_yaml_ng::Mapping::new()),
),
("hosts".into(), Value::Mapping(serde_yaml_ng::Mapping::new())),
]);

// 检查DNS配置文件是否存在
@@ -322,12 +313,7 @@ async fn init_dns_config() -> Result<()> {

if !dns_path.exists() {
logging!(info, Type::Setup, "Creating default DNS config file");
help::save_yaml(
&dns_path,
&default_dns_config,
Some("# Clash Verge DNS Config"),
)
.await?;
help::save_yaml(&dns_path, &default_dns_config, Some("# Clash Verge DNS Config")).await?;
}

Ok(())
@@ -343,9 +329,9 @@ async fn ensure_directories() -> Result<()> {

for (name, dir) in directories {
if !dir.exists() {
fs::create_dir_all(&dir).await.map_err(|e| {
anyhow::anyhow!("Failed to create {} directory {:?}: {}", name, dir, e)
})?;
fs::create_dir_all(&dir)
.await
.map_err(|e| anyhow::anyhow!("Failed to create {} directory {:?}: {}", name, dir, e))?;
logging!(info, Type::Setup, "Created {} directory: {:?}", name, dir);
}
}
@@ -535,10 +521,7 @@ pub async fn startup_script() -> Result<()> {
} else if script_path.ends_with(".ps1") || script_path.ends_with(".bat") {
"powershell"
} else {
return Err(anyhow::anyhow!(
"unsupported script extension: {}",
script_path
));
return Err(anyhow::anyhow!("unsupported script extension: {}", script_path));
};

let script_dir = PathBuf::from(script_path.as_str());

@@ -79,15 +79,9 @@ struct SessionEnv {

impl SessionEnv {
fn gather() -> Self {
let desktop_env = env::var("XDG_CURRENT_DESKTOP")
.unwrap_or_default()
.to_uppercase();
let session_desktop = env::var("XDG_SESSION_DESKTOP")
.unwrap_or_default()
.to_uppercase();
let desktop_session = env::var("DESKTOP_SESSION")
.unwrap_or_default()
.to_uppercase();
let desktop_env = env::var("XDG_CURRENT_DESKTOP").unwrap_or_default().to_uppercase();
let session_desktop = env::var("XDG_SESSION_DESKTOP").unwrap_or_default().to_uppercase();
let desktop_session = env::var("DESKTOP_SESSION").unwrap_or_default().to_uppercase();

let is_kde_plasma = desktop_env.contains("KDE")
|| session_desktop.contains("KDE")
@@ -95,9 +89,8 @@ impl SessionEnv {
|| desktop_env.contains("PLASMA")
|| session_desktop.contains("PLASMA")
|| desktop_session.contains("PLASMA");
let is_hyprland = desktop_env.contains("HYPR")
|| session_desktop.contains("HYPR")
|| desktop_session.contains("HYPR");
let is_hyprland =
desktop_env.contains("HYPR") || session_desktop.contains("HYPR") || desktop_session.contains("HYPR");
let is_wayland = env::var("XDG_SESSION_TYPE")
.map(|value| value.eq_ignore_ascii_case("wayland"))
.unwrap_or(false)
@@ -128,13 +121,14 @@ struct DmabufOverrides {

impl DmabufOverrides {
fn gather() -> Self {
let user_preference = env::var("CLASH_VERGE_DMABUF").ok().and_then(|value| {
match value.trim().to_ascii_lowercase().as_str() {
"1" | "true" | "enable" | "on" => Some(true),
"0" | "false" | "disable" | "off" => Some(false),
_ => None,
}
});
let user_preference =
env::var("CLASH_VERGE_DMABUF")
.ok()
.and_then(|value| match value.trim().to_ascii_lowercase().as_str() {
"1" | "true" | "enable" | "on" => Some(true),
"0" | "false" | "disable" | "off" => Some(false),
_ => None,
});
let dmabuf_override = env::var("WEBKIT_DISABLE_DMABUF_RENDERER").ok();

Self {
@@ -185,13 +179,11 @@ impl DmabufDecision {
match overrides.user_preference {
Some(true) => {
decision.enable_dmabuf = true;
decision.message =
Some("CLASH_VERGE_DMABUF=1: 强制启用 WebKit DMABUF 渲染。".into());
decision.message = Some("CLASH_VERGE_DMABUF=1: 强制启用 WebKit DMABUF 渲染。".into());
}
Some(false) => {
decision.enable_dmabuf = false;
decision.message =
Some("CLASH_VERGE_DMABUF=0: 强制禁用 WebKit DMABUF 渲染。".into());
decision.message = Some("CLASH_VERGE_DMABUF=0: 强制禁用 WebKit DMABUF 渲染。".into());
if session.is_wayland && !session.prefer_native_wayland {
decision.force_x11_backend = true;
}
@@ -200,10 +192,8 @@ impl DmabufDecision {
if overrides.has_env_override() {
if overrides.dmabuf_override.as_deref() == Some("1") {
decision.enable_dmabuf = false;
decision.message = Some(
"检测到 WEBKIT_DISABLE_DMABUF_RENDERER=1,沿用用户的软件渲染配置。"
.into(),
);
decision.message =
Some("检测到 WEBKIT_DISABLE_DMABUF_RENDERER=1,沿用用户的软件渲染配置。".into());
if session.is_wayland && !session.prefer_native_wayland {
decision.force_x11_backend = true;
}
@@ -230,14 +220,18 @@ impl DmabufDecision {
})
.unwrap_or_else(|| String::from("NVIDIA Open Kernel Module"));
let message = match reason {
NvidiaDmabufDisableReason::PrimaryOpenKernelModule => format!(
"Wayland 会话检测到 {}:禁用 WebKit DMABUF 渲染以规避协议错误。",
summary
),
NvidiaDmabufDisableReason::MissingBootVga => format!(
"Wayland 会话检测到 {},但缺少 boot_vga 信息:预防性禁用 WebKit DMABUF。",
summary
),
NvidiaDmabufDisableReason::PrimaryOpenKernelModule => {
format!(
"Wayland 会话检测到 {}:禁用 WebKit DMABUF 渲染以规避协议错误。",
summary
)
}
NvidiaDmabufDisableReason::MissingBootVga => {
format!(
"Wayland 会话检测到 {},但缺少 boot_vga 信息:预防性禁用 WebKit DMABUF。",
summary
)
}
NvidiaDmabufDisableReason::PreferNativeWayland => format!(
"Wayland ({}) + {}:检测到 NVIDIA Open Kernel Module 在辅 GPU 上运行,预防性禁用 WebKit DMABUF。",
session.compositor_label, summary
@@ -266,12 +260,9 @@ impl DmabufDecision {
);
}
} else if session.is_wayland {
decision.message = Some(
"Wayland 会话未匹配受支持的合成器:禁用 WebKit DMABUF 渲染。".into(),
);
decision.message = Some("Wayland 会话未匹配受支持的合成器:禁用 WebKit DMABUF 渲染。".into());
} else {
decision.message =
Some("禁用 WebKit DMABUF 渲染以获得更稳定的输出。".into());
decision.message = Some("禁用 WebKit DMABUF 渲染以获得更稳定的输出。".into());
}
}
}
@@ -433,13 +424,7 @@ fn read_nvidia_driver_summary() -> Option<String> {
.map(|line| line.trim().to_string())
.filter(|line| !line.is_empty()),
Err(err) => {
logging!(
info,
Type::Setup,
"读取 {} 失败:{}",
NVIDIA_VERSION_PATH,
err
);
logging!(info, Type::Setup, "读取 {} 失败:{}", NVIDIA_VERSION_PATH, err);
None
}
}
@@ -639,10 +624,7 @@ fn flush_section(

if let Some(&index) = seen.get(scheme) {
let existing_line = &mut processed[index];
let existing_prefix: String = existing_line
.chars()
.take_while(|c| c.is_whitespace())
.collect();
let existing_prefix: String = existing_line.chars().take_while(|c| c.is_whitespace()).collect();
let Some((_, existing_raw_value)) = existing_line.trim().split_once('=') else {
processed.push(line);
continue;
@@ -695,10 +677,7 @@ fn flush_section(
*changed = true;
}

let prefix = line
.chars()
.take_while(|c| c.is_whitespace())
.collect::<String>();
let prefix = line.chars().take_while(|c| c.is_whitespace()).collect::<String>();
let mut new_line = format!("{prefix}x-scheme-handler/{scheme}=");
new_line.push_str(&values.join(";"));
new_line.push(';');
@@ -716,10 +695,7 @@ fn flush_section(
processed.push(line);
}

let ensure_all = matches!(
kind,
SectionKind::DefaultApplications | SectionKind::AddedAssociations
);
let ensure_all = matches!(kind, SectionKind::DefaultApplications | SectionKind::AddedAssociations);

if ensure_all {
for &scheme in schemes {

@@ -20,11 +20,7 @@ pub struct HttpResponse {

impl HttpResponse {
pub const fn new(status: StatusCode, headers: HeaderMap, body: String) -> Self {
Self {
status,
headers,
body,
}
Self { status, headers, body }
}

pub const fn status(&self) -> StatusCode {
@@ -208,10 +204,7 @@ impl NetworkManager {
{
let auth_str = format!("{}:{}", parsed.username(), pass);
let encoded = general_purpose::STANDARD.encode(auth_str);
extra_headers.insert(
"Authorization",
HeaderValue::from_str(&format!("Basic {}", encoded))?,
);
extra_headers.insert("Authorization", HeaderValue::from_str(&format!("Basic {}", encoded))?);
}

let clean_url = {
@@ -235,8 +228,7 @@ impl NetworkManager {
let response = match request_builder.send().await {
Ok(resp) => resp,
Err(e) => {
self.record_connection_error(&format!("Request failed: {}", e))
.await;
self.record_connection_error(&format!("Request failed: {}", e)).await;
return Err(anyhow::anyhow!("Request failed: {}", e));
}
};

@@ -17,13 +17,7 @@ pub enum NotificationEvent<'a> {

fn notify(title: &str, body: &str) {
let app_handle = handle::Handle::app_handle();
app_handle
.notification()
.builder()
.title(title)
.body(body)
.show()
.ok();
app_handle.notification().builder().title(title).body(body).show().ok();
}

pub async fn notify_event<'a>(event: NotificationEvent<'a>) {

@@ -9,12 +9,7 @@ pub async fn set_public_dns(dns_server: String) {
let resource_dir = match dirs::app_resources_dir() {
Ok(dir) => dir,
Err(e) => {
logging!(
error,
Type::Config,
"Failed to get resource directory: {}",
e
);
logging!(error, Type::Config, "Failed to get resource directory: {}", e);
return;
}
};
@@ -55,12 +50,7 @@ pub async fn restore_public_dns() {
let resource_dir = match dirs::app_resources_dir() {
Ok(dir) => dir,
Err(e) => {
logging!(
error,
Type::Config,
"Failed to get resource directory: {}",
e
);
logging!(error, Type::Config, "Failed to get resource directory: {}", e);
return;
}
};

@@ -56,12 +56,7 @@ pub fn resolve_setup_sync() {

pub fn resolve_setup_async() {
AsyncHandler::spawn(|| async {
logging!(
info,
Type::ClashVergeRev,
"Version: {}",
env!("CARGO_PKG_VERSION")
);
logging!(info, Type::ClashVergeRev, "Version: {}", env!("CARGO_PKG_VERSION"));

futures::join!(init_work_config(), init_resources(), init_startup_script());

@@ -185,10 +180,7 @@ pub(super) async fn init_core_manager() {
}

pub(super) async fn init_system_proxy() {
logging_error!(
Type::Setup,
sysopt::Sysopt::global().update_sysproxy().await
);
logging_error!(Type::Setup, sysopt::Sysopt::global().update_sysproxy().await);
}

pub(super) async fn init_system_proxy_guard() {
@@ -200,11 +192,7 @@ pub(super) async fn refresh_tray_menu() {
}

pub(super) async fn init_window() {
let is_silent_start = Config::verge()
.await
.data_arc()
.enable_silent_start
.unwrap_or(false);
let is_silent_start = Config::verge().await.data_arc().enable_silent_start.unwrap_or(false);
#[cfg(target_os = "macos")]
if is_silent_start {
use crate::core::handle::Handle;

@@ -30,50 +30,39 @@ pub(super) async fn resolve_scheme(param: &str) -> Result<()> {
}
};

let (url_param, name) =
if link_parsed.scheme() == "clash" || link_parsed.scheme() == "clash-verge" {
let name_owned: Option<String> = link_parsed
.query_pairs()
.find(|(key, _)| key == "name")
.map(|(_, value)| value.into_owned().into());
let (url_param, name) = if link_parsed.scheme() == "clash" || link_parsed.scheme() == "clash-verge" {
let name_owned: Option<String> = link_parsed
.query_pairs()
.find(|(key, _)| key == "name")
.map(|(_, value)| value.into_owned().into());

let url_param = if let Some(query) = link_parsed.query() {
let prefix = "url=";
if let Some(pos) = query.find(prefix) {
let raw_url = &query[pos + prefix.len()..];
Some(percent_decode_str(raw_url).decode_utf8_lossy().to_string())
} else {
None
}
let url_param = if let Some(query) = link_parsed.query() {
let prefix = "url=";
if let Some(pos) = query.find(prefix) {
let raw_url = &query[pos + prefix.len()..];
Some(percent_decode_str(raw_url).decode_utf8_lossy().to_string())
} else {
None
};
(url_param, name_owned)
}
} else {
(None, None)
None
};
(url_param, name_owned)
} else {
(None, None)
};

let url = if let Some(ref url) = url_param {
url
} else {
logging!(
error,
Type::Config,
"missing url parameter in deep link: {}",
param_str
);
logging!(error, Type::Config, "missing url parameter in deep link: {}", param_str);
return Ok(());
};

let mut item = match PrfItem::from_url(url, name.as_ref(), None, None).await {
Ok(item) => item,
Err(e) => {
logging!(
error,
Type::Config,
"failed to parse profile from url: {:?}",
e
);
logging!(error, Type::Config, "failed to parse profile from url: {:?}", e);
handle::Handle::notice_message("import_sub_url::error", e.to_string());
return Ok(());
}
@@ -94,12 +83,7 @@ pub(super) async fn resolve_scheme(param: &str) -> Result<()> {
handle::Handle::notify_profile_changed(uid);
}
Err(e) => {
logging!(
error,
Type::Config,
"failed to import subscription url: {:?}",
e
);
logging!(error, Type::Config, "failed to import subscription url: {:?}", e);
Config::profiles().await.discard();
handle::Handle::notice_message("import_sub_url::error", e.to_string());
return Ok(());

@@ -52,11 +52,7 @@ pub async fn build_new_window() -> Result<WebviewWindow, String> {
LIGHT_BACKGROUND_COLOR
};

let initial_script = build_window_initial_script(
initial_theme_mode,
DARK_BACKGROUND_HEX,
LIGHT_BACKGROUND_HEX,
);
let initial_script = build_window_initial_script(initial_theme_mode, DARK_BACKGROUND_HEX, LIGHT_BACKGROUND_HEX);

let mut builder = tauri::WebviewWindowBuilder::new(
app_handle,
@@ -81,10 +77,7 @@ pub async fn build_new_window() -> Result<WebviewWindow, String> {

match builder.build() {
Ok(window) => {
logging_error!(
Type::Window,
window.set_background_color(Some(background_color))
);
logging_error!(Type::Window, window.set_background_color(Some(background_color)));
logging_error!(Type::Window, window.eval(INITIAL_LOADING_OVERLAY));
Ok(window)
}

@@ -1,8 +1,4 @@
pub fn build_window_initial_script(
initial_theme_mode: &str,
dark_background: &str,
light_background: &str,
) -> String {
pub fn build_window_initial_script(initial_theme_mode: &str, dark_background: &str, light_background: &str) -> String {
let theme_mode = match initial_theme_mode {
"dark" => "dark",
"light" => "light",

@@ -28,9 +28,7 @@ static SHUTDOWN_SENDER: OnceCell<Mutex<Option<oneshot::Sender<()>>>> = OnceCell:
pub async fn check_singleton() -> Result<()> {
let port = IVerge::get_singleton_port();
if !local_port_available(port) {
let client = ClientBuilder::new()
.timeout(Duration::from_millis(500))
.build()?;
let client = ClientBuilder::new().timeout(Duration::from_millis(500)).build()?;
// 需要确保 Send
#[allow(clippy::needless_collect)]
let argvs: Vec<std::string::String> = std::env::args().collect();
@@ -40,9 +38,7 @@ pub async fn check_singleton() -> Result<()> {
let param = argvs[1].as_str();
if param.starts_with("clash:") {
client
.get(format!(
"http://127.0.0.1:{port}/commands/scheme?param={param}"
))
.get(format!("http://127.0.0.1:{port}/commands/scheme?param={param}"))
.send()
.await?;
}
@@ -53,11 +49,7 @@ pub async fn check_singleton() -> Result<()> {
.send()
.await?;
}
logging!(
error,
Type::Window,
"failed to setup singleton listen server"
);
logging!(error, Type::Window, "failed to setup singleton listen server");
bail!("app exists");
}
Ok(())

@@ -55,11 +55,7 @@ fn get_window_operation_debounce() -> &'static Mutex<Instant> {

fn should_handle_window_operation() -> bool {
if WINDOW_OPERATION_IN_PROGRESS.load(Ordering::Acquire) {
logging!(
warn,
Type::Window,
"Warning: [防抖] 窗口操作已在进行中,跳过重复调用"
);
logging!(warn, Type::Window, "Warning: [防抖] 窗口操作已在进行中,跳过重复调用");
return false;
}

@@ -328,12 +324,7 @@ impl WindowManager {
/// 创建新窗口,防抖避免重复调用
pub fn create_window(is_show: bool) -> Pin<Box<dyn Future<Output = bool> + Send>> {
Box::pin(async move {
logging!(
info,
Type::Window,
"开始创建/显示主窗口, is_show={}",
is_show
);
logging!(info, Type::Window, "开始创建/显示主窗口, is_show={}", is_show);

if !is_show {
return false;
@@ -383,8 +374,6 @@ impl WindowManager {
let is_focused = Self::is_main_window_focused();
let is_minimized = Self::is_main_window_minimized();

format!(
"窗口状态: {state:?} | 可见: {is_visible} | 有焦点: {is_focused} | 最小化: {is_minimized}"
)
format!("窗口状态: {state:?} | 可见: {is_visible} | 有焦点: {is_focused} | 最小化: {is_minimized}")
}
}