clash-verge-rev/src-tauri/src/config/prfitem.rs
Tunglies 537d27d10b fix: clippy errors with new config (#4428)
* refactor: improve code quality with clippy fixes and standardized logging

- Replace dangerous unwrap()/expect() calls with proper error handling
- Standardize logging from log:: to logging! macro with Type:: classifications
- Fix app handle panics with graceful fallback patterns
- Improve error resilience across 35+ modules without breaking functionality
- Reduce clippy warnings from 300+ to 0 in main library code

* chore: update Cargo.toml configuration

* refactor: resolve all clippy warnings
- Fix Arc clone warnings using explicit Arc::clone syntax across 9 files
- Add #[allow(clippy::expect_used)] to test functions for appropriate expect usage
- Remove no-effect statements from debug code cleanup
- Apply clippy auto-fixes for dbg! macro removals and path statements
- Achieve zero clippy warnings on all targets with -D warnings flag

* chore: update Cargo.toml clippy configuration

* refactor: simplify macOS job configuration and improve caching

* refactor: remove unnecessary async/await from service and proxy functions

* refactor: streamline pnpm installation in CI configuration

* refactor: simplify error handling and remove unnecessary else statements

* refactor: replace async/await with synchronous locks for core management

* refactor: add workflow_dispatch trigger to clippy job

* refactor: convert async functions to synchronous for service management

* refactor: convert async functions to synchronous for UWP tool invocation

* fix: change wrong logging

* refactor: convert proxy restoration functions to async

* Revert "refactor: convert proxy restoration functions to async"

This reverts commit b82f5d250b.

* refactor: update proxy restoration functions to return Result types

* fix: handle errors during proxy restoration and update async function signatures

* fix: handle errors during proxy restoration and update async function signatures

* refactor: update restore_pac_proxy and restore_sys_proxy functions to async

* fix: convert restore_pac_proxy and restore_sys_proxy functions to async

* fix: await restore_sys_proxy calls in proxy restoration logic

* fix: suppress clippy warnings for unused async functions in proxy restoration

* fix: suppress clippy warnings for unused async functions in proxy restoration
2025-08-18 02:02:25 +08:00

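The first commit above replaces unwrap()/expect() calls with graceful fallbacks. A minimal sketch of that pattern, separate from the file below and built around a hypothetical load_profile_name helper (the project's own logging! macro and Type:: classifications are not shown):

use anyhow::{Context, Result};
use std::{fs, path::Path};

// Hypothetical example of the fallback pattern: propagate I/O errors with
// context instead of panicking, and use a default when a field is missing.
fn load_profile_name(path: &Path) -> Result<String> {
    let raw = fs::read_to_string(path)
        .with_context(|| format!("failed to read profile at {}", path.display()))?;
    let name = raw
        .lines()
        .find_map(|line| line.strip_prefix("name:"))
        .map(|value| value.trim().to_string())
        .unwrap_or_else(|| "Unnamed Profile".to_string());
    Ok(name)
}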

use crate::utils::{
dirs, help,
network::{NetworkManager, ProxyType},
tmpl,
};
use anyhow::{bail, Context, Result};
use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
use serde_yaml::Mapping;
use std::{fs, time::Duration};
use super::Config;
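
/// A single entry in the profiles list. `itype` distinguishes remote/local
/// subscriptions from the enhancement items (merge/script/rules/proxies/groups)
/// that are created alongside them.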
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct PrfItem {
pub uid: Option<String>,
    /// profile item type
    /// enum value: remote | local | script | merge | rules | proxies | groups
#[serde(rename = "type")]
pub itype: Option<String>,
/// profile name
pub name: Option<String>,
/// profile file
pub file: Option<String>,
/// profile description
#[serde(skip_serializing_if = "Option::is_none")]
pub desc: Option<String>,
/// source url
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
/// selected information
#[serde(skip_serializing_if = "Option::is_none")]
pub selected: Option<Vec<PrfSelected>>,
/// subscription user info
#[serde(skip_serializing_if = "Option::is_none")]
pub extra: Option<PrfExtra>,
/// updated time
pub updated: Option<usize>,
/// some options of the item
#[serde(skip_serializing_if = "Option::is_none")]
pub option: Option<PrfOption>,
/// profile web page url
#[serde(skip_serializing_if = "Option::is_none")]
pub home: Option<String>,
/// the file data
#[serde(skip)]
pub file_data: Option<String>,
}
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
pub struct PrfSelected {
pub name: Option<String>,
pub now: Option<String>,
}
#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
pub struct PrfExtra {
pub upload: u64,
pub download: u64,
pub total: u64,
pub expire: u64,
}
#[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct PrfOption {
/// for `remote` profile's http request
/// see issue #13
#[serde(skip_serializing_if = "Option::is_none")]
pub user_agent: Option<String>,
/// for `remote` profile
/// use system proxy
#[serde(skip_serializing_if = "Option::is_none")]
pub with_proxy: Option<bool>,
/// for `remote` profile
/// use self proxy
#[serde(skip_serializing_if = "Option::is_none")]
pub self_proxy: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub update_interval: Option<u64>,
    /// for `remote` profile
    /// HTTP request timeout in seconds
    /// when unset, the request falls back to a 20-second timeout
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timeout_seconds: Option<u64>,
/// for `remote` profile
/// disable certificate validation
/// default is `false`
#[serde(skip_serializing_if = "Option::is_none")]
pub danger_accept_invalid_certs: Option<bool>,
pub merge: Option<String>,
pub script: Option<String>,
pub rules: Option<String>,
pub proxies: Option<String>,
pub groups: Option<String>,
}
impl PrfOption {
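    /// Merge two option sets: each field set in `other` overrides the same
    /// field in `one`; otherwise the value from `one` is kept.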
pub fn merge(one: Option<Self>, other: Option<Self>) -> Option<Self> {
match (one, other) {
(Some(mut a), Some(b)) => {
a.user_agent = b.user_agent.or(a.user_agent);
a.with_proxy = b.with_proxy.or(a.with_proxy);
a.self_proxy = b.self_proxy.or(a.self_proxy);
a.danger_accept_invalid_certs = b
.danger_accept_invalid_certs
.or(a.danger_accept_invalid_certs);
a.update_interval = b.update_interval.or(a.update_interval);
a.merge = b.merge.or(a.merge);
a.script = b.script.or(a.script);
a.rules = b.rules.or(a.rules);
a.proxies = b.proxies.or(a.proxies);
a.groups = b.groups.or(a.groups);
a.timeout_seconds = b.timeout_seconds.or(a.timeout_seconds);
Some(a)
}
t => t.0.or(t.1),
}
}
}
impl PrfItem {
/// From partial item
/// must contain `itype`
pub async fn from(item: PrfItem, file_data: Option<String>) -> Result<PrfItem> {
if item.itype.is_none() {
bail!("type should not be null");
}
let itype = item
.itype
.ok_or_else(|| anyhow::anyhow!("type should not be null"))?;
match itype.as_str() {
"remote" => {
let url = item
.url
.as_ref()
.ok_or_else(|| anyhow::anyhow!("url should not be null"))?;
let name = item.name;
let desc = item.desc;
PrfItem::from_url(url, name, desc, item.option).await
}
"local" => {
let name = item.name.unwrap_or("Local File".into());
let desc = item.desc.unwrap_or("".into());
PrfItem::from_local(name, desc, file_data, item.option)
}
typ => bail!("invalid profile item type \"{typ}\""),
}
}
/// ## Local type
/// create a new item from name/desc
pub fn from_local(
name: String,
desc: String,
file_data: Option<String>,
option: Option<PrfOption>,
) -> Result<PrfItem> {
let uid = help::get_uid("L");
let file = format!("{uid}.yaml");
let opt_ref = option.as_ref();
let update_interval = opt_ref.and_then(|o| o.update_interval);
let mut merge = opt_ref.and_then(|o| o.merge.clone());
let mut script = opt_ref.and_then(|o| o.script.clone());
let mut rules = opt_ref.and_then(|o| o.rules.clone());
let mut proxies = opt_ref.and_then(|o| o.proxies.clone());
let mut groups = opt_ref.and_then(|o| o.groups.clone());
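        // Every profile carries a full chain of enhancement items (merge, script,
        // rules, proxies, groups): create any that were not supplied, register
        // them in the profiles config, and remember their uids in this item's
        // options.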
if merge.is_none() {
let merge_item = PrfItem::from_merge(None)?;
Config::profiles()
.data_mut()
.append_item(merge_item.clone())?;
merge = merge_item.uid;
}
if script.is_none() {
let script_item = PrfItem::from_script(None)?;
Config::profiles()
.data_mut()
.append_item(script_item.clone())?;
script = script_item.uid;
}
if rules.is_none() {
let rules_item = PrfItem::from_rules()?;
Config::profiles()
.data_mut()
.append_item(rules_item.clone())?;
rules = rules_item.uid;
}
if proxies.is_none() {
let proxies_item = PrfItem::from_proxies()?;
Config::profiles()
.data_mut()
.append_item(proxies_item.clone())?;
proxies = proxies_item.uid;
}
if groups.is_none() {
let groups_item = PrfItem::from_groups()?;
Config::profiles()
.data_mut()
.append_item(groups_item.clone())?;
groups = groups_item.uid;
}
Ok(PrfItem {
uid: Some(uid),
itype: Some("local".into()),
name: Some(name),
desc: Some(desc),
file: Some(file),
url: None,
selected: None,
extra: None,
option: Some(PrfOption {
update_interval,
merge,
script,
rules,
proxies,
groups,
..PrfOption::default()
}),
home: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(file_data.unwrap_or(tmpl::ITEM_LOCAL.into())),
})
}
/// ## Remote type
/// create a new item from url
pub async fn from_url(
url: &str,
name: Option<String>,
desc: Option<String>,
option: Option<PrfOption>,
) -> Result<PrfItem> {
let opt_ref = option.as_ref();
let with_proxy = opt_ref.is_some_and(|o| o.with_proxy.unwrap_or(false));
let self_proxy = opt_ref.is_some_and(|o| o.self_proxy.unwrap_or(false));
let accept_invalid_certs =
opt_ref.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
let user_agent = opt_ref.and_then(|o| o.user_agent.clone());
let update_interval = opt_ref.and_then(|o| o.update_interval);
let timeout = opt_ref.and_then(|o| o.timeout_seconds).unwrap_or(20);
let mut merge = opt_ref.and_then(|o| o.merge.clone());
let mut script = opt_ref.and_then(|o| o.script.clone());
let mut rules = opt_ref.and_then(|o| o.rules.clone());
let mut proxies = opt_ref.and_then(|o| o.proxies.clone());
let mut groups = opt_ref.and_then(|o| o.groups.clone());
        // choose the proxy type for this request
let proxy_type = if self_proxy {
ProxyType::Localhost
} else if with_proxy {
ProxyType::System
} else {
ProxyType::None
};
        // send the request through the network manager
let resp = match NetworkManager::global()
.get_with_interrupt(
url,
proxy_type,
Some(timeout),
user_agent.clone(),
accept_invalid_certs,
)
.await
{
Ok(r) => r,
Err(e) => {
tokio::time::sleep(Duration::from_millis(100)).await;
bail!("failed to fetch remote profile: {}", e);
}
};
let status_code = resp.status();
        if !status_code.is_success() {
bail!("failed to fetch remote profile with status {status_code}")
}
let header = resp.headers();
// parse the Subscription UserInfo
let extra = match header.get("Subscription-Userinfo") {
Some(value) => {
let sub_info = value.to_str().unwrap_or("");
Some(PrfExtra {
upload: help::parse_str(sub_info, "upload").unwrap_or(0),
download: help::parse_str(sub_info, "download").unwrap_or(0),
total: help::parse_str(sub_info, "total").unwrap_or(0),
expire: help::parse_str(sub_info, "expire").unwrap_or(0),
})
}
None => None,
};
// parse the Content-Disposition
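        // Prefer the RFC 5987 `filename*` parameter (percent-encoded), then the
        // plain `filename` parameter; when the header is absent, fall back to the
        // last path segment of the URL.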
let filename = match header.get("Content-Disposition") {
Some(value) => {
let filename = format!("{value:?}");
let filename = filename.trim_matches('"');
match help::parse_str::<String>(filename, "filename*") {
Some(filename) => {
let iter = percent_encoding::percent_decode(filename.as_bytes());
let filename = iter.decode_utf8().unwrap_or_default();
filename.split("''").last().map(|s| s.to_string())
}
None => match help::parse_str::<String>(filename, "filename") {
Some(filename) => {
let filename = filename.trim_matches('"');
Some(filename.to_string())
}
None => None,
},
}
}
None => Some(
crate::utils::help::get_last_part_and_decode(url).unwrap_or("Remote File".into()),
),
};
let update_interval = match update_interval {
Some(val) => Some(val),
None => match header.get("profile-update-interval") {
Some(value) => match value.to_str().unwrap_or("").parse::<u64>() {
Ok(val) => Some(val * 60), // hour -> min
Err(_) => None,
},
None => None,
},
};
let home = match header.get("profile-web-page-url") {
Some(value) => {
let str_value = value.to_str().unwrap_or("");
Some(str_value.to_string())
}
None => None,
};
let uid = help::get_uid("R");
let file = format!("{uid}.yaml");
let name = name.unwrap_or(filename.unwrap_or("Remote File".into()));
let data = resp.text_with_charset("utf-8").await?;
        // strip the UTF-8 BOM ("UTF-8 with BOM") if present
let data = data.trim_start_matches('\u{feff}');
        // check whether the data is valid YAML
let yaml = serde_yaml::from_str::<Mapping>(data)
.context("the remote profile data is invalid yaml")?;
if !yaml.contains_key("proxies") && !yaml.contains_key("proxy-providers") {
bail!("profile does not contain `proxies` or `proxy-providers`");
}
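        // As in the local case, create any missing enhancement items and record
        // their uids in this item's options.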
if merge.is_none() {
let merge_item = PrfItem::from_merge(None)?;
Config::profiles()
.data_mut()
.append_item(merge_item.clone())?;
merge = merge_item.uid;
}
if script.is_none() {
let script_item = PrfItem::from_script(None)?;
Config::profiles()
.data_mut()
.append_item(script_item.clone())?;
script = script_item.uid;
}
if rules.is_none() {
let rules_item = PrfItem::from_rules()?;
Config::profiles()
.data_mut()
.append_item(rules_item.clone())?;
rules = rules_item.uid;
}
if proxies.is_none() {
let proxies_item = PrfItem::from_proxies()?;
Config::profiles()
.data_mut()
.append_item(proxies_item.clone())?;
proxies = proxies_item.uid;
}
if groups.is_none() {
let groups_item = PrfItem::from_groups()?;
Config::profiles()
.data_mut()
.append_item(groups_item.clone())?;
groups = groups_item.uid;
}
Ok(PrfItem {
uid: Some(uid),
itype: Some("remote".into()),
name: Some(name),
desc,
file: Some(file),
url: Some(url.into()),
selected: None,
extra,
option: Some(PrfOption {
update_interval,
merge,
script,
rules,
proxies,
groups,
..PrfOption::default()
}),
home,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(data.into()),
})
}
/// ## Merge type (enhance)
/// create the enhanced item by using `merge` rule
pub fn from_merge(uid: Option<String>) -> Result<PrfItem> {
let mut id = help::get_uid("m");
let mut template = tmpl::ITEM_MERGE_EMPTY.into();
if let Some(uid) = uid {
id = uid;
template = tmpl::ITEM_MERGE.into();
}
let file = format!("{id}.yaml");
Ok(PrfItem {
uid: Some(id),
itype: Some("merge".into()),
name: None,
desc: None,
file: Some(file),
url: None,
selected: None,
extra: None,
option: None,
home: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(template),
})
}
/// ## Script type (enhance)
    /// create the enhanced item by using a JavaScript (QuickJS) script
pub fn from_script(uid: Option<String>) -> Result<PrfItem> {
let mut id = help::get_uid("s");
if let Some(uid) = uid {
id = uid;
}
let file = format!("{id}.js"); // js ext
Ok(PrfItem {
uid: Some(id),
itype: Some("script".into()),
name: None,
desc: None,
file: Some(file),
url: None,
home: None,
selected: None,
extra: None,
option: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(tmpl::ITEM_SCRIPT.into()),
})
}
/// ## Rules type (enhance)
pub fn from_rules() -> Result<PrfItem> {
let uid = help::get_uid("r");
let file = format!("{uid}.yaml"); // yaml ext
Ok(PrfItem {
uid: Some(uid),
itype: Some("rules".into()),
name: None,
desc: None,
file: Some(file),
url: None,
home: None,
selected: None,
extra: None,
option: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(tmpl::ITEM_RULES.into()),
})
}
/// ## Proxies type (enhance)
pub fn from_proxies() -> Result<PrfItem> {
let uid = help::get_uid("p");
let file = format!("{uid}.yaml"); // yaml ext
Ok(PrfItem {
uid: Some(uid),
itype: Some("proxies".into()),
name: None,
desc: None,
file: Some(file),
url: None,
home: None,
selected: None,
extra: None,
option: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(tmpl::ITEM_PROXIES.into()),
})
}
/// ## Groups type (enhance)
pub fn from_groups() -> Result<PrfItem> {
let uid = help::get_uid("g");
let file = format!("{uid}.yaml"); // yaml ext
Ok(PrfItem {
uid: Some(uid),
itype: Some("groups".into()),
name: None,
desc: None,
file: Some(file),
url: None,
home: None,
selected: None,
extra: None,
option: None,
updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(tmpl::ITEM_GROUPS.into()),
})
}
/// get the file data
pub fn read_file(&self) -> Result<String> {
let file = self
.file
.clone()
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
let path = dirs::app_profiles_dir()?.join(file);
fs::read_to_string(path).context("failed to read the file")
}
/// save the file data
pub fn save_file(&self, data: String) -> Result<()> {
let file = self
.file
.clone()
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
let path = dirs::app_profiles_dir()?.join(file);
fs::write(path, data.as_bytes()).context("failed to save the file")
}
}