mirror of
https://github.com/clash-verge-rev/clash-verge-rev.git
synced 2026-01-29 08:45:41 +08:00
fix: clippy errors with new config (#4428)
* refactor: improve code quality with clippy fixes and standardized logging
- Replace dangerous unwrap()/expect() calls with proper error handling
- Standardize logging from log:: to logging! macro with Type:: classifications
- Fix app handle panics with graceful fallback patterns
- Improve error resilience across 35+ modules without breaking functionality
- Reduce clippy warnings from 300+ to 0 in main library code
* chore: update Cargo.toml configuration
* refactor: resolve all clippy warnings
- Fix Arc clone warnings using explicit Arc::clone syntax across 9 files
- Add #[allow(clippy::expect_used)] to test functions for appropriate expect usage
- Remove no-effect statements from debug code cleanup
- Apply clippy auto-fixes for dbg! macro removals and path statements
- Achieve zero clippy warnings on all targets with -D warnings flag
* chore: update Cargo.toml clippy configuration
* refactor: simplify macOS job configuration and improve caching
* refactor: remove unnecessary async/await from service and proxy functions
* refactor: streamline pnpm installation in CI configuration
* refactor: simplify error handling and remove unnecessary else statements
* refactor: replace async/await with synchronous locks for core management
* refactor: add workflow_dispatch trigger to clippy job
* refactor: convert async functions to synchronous for service management
* refactor: convert async functions to synchronous for UWP tool invocation
* fix: change wrong logging
* refactor: convert proxy restoration functions to async
* Revert "refactor: convert proxy restoration functions to async"
This reverts commit b82f5d250b.
* refactor: update proxy restoration functions to return Result types
* fix: handle errors during proxy restoration and update async function signatures
* fix: handle errors during proxy restoration and update async function signatures
* refactor: update restore_pac_proxy and restore_sys_proxy functions to async
* fix: convert restore_pac_proxy and restore_sys_proxy functions to async
* fix: await restore_sys_proxy calls in proxy restoration logic
* fix: suppress clippy warnings for unused async functions in proxy restoration
* fix: suppress clippy warnings for unused async functions in proxy restoration
This commit is contained in:
@@ -40,13 +40,37 @@ singleton_lazy!(NetworkManager, NETWORK_MANAGER, NetworkManager::new);
|
||||
impl NetworkManager {
|
||||
fn new() -> Self {
|
||||
// 创建专用的异步运行时,线程数限制为4个
|
||||
let runtime = Builder::new_multi_thread()
|
||||
let runtime = match Builder::new_multi_thread()
|
||||
.worker_threads(4)
|
||||
.thread_name("clash-verge-network")
|
||||
.enable_io()
|
||||
.enable_time()
|
||||
.build()
|
||||
.expect("Failed to create network runtime");
|
||||
{
|
||||
Ok(runtime) => runtime,
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"Failed to create network runtime: {}. Using fallback single-threaded runtime.",
|
||||
e
|
||||
);
|
||||
// Fallback to current thread runtime
|
||||
match Builder::new_current_thread()
|
||||
.enable_io()
|
||||
.enable_time()
|
||||
.thread_name("clash-verge-network-fallback")
|
||||
.build()
|
||||
{
|
||||
Ok(fallback_runtime) => fallback_runtime,
|
||||
Err(fallback_err) => {
|
||||
log::error!(
|
||||
"Failed to create fallback runtime: {}. This is critical.",
|
||||
fallback_err
|
||||
);
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
NetworkManager {
|
||||
runtime: Arc::new(runtime),
|
||||
@@ -66,7 +90,7 @@ impl NetworkManager {
|
||||
logging!(info, Type::Network, true, "初始化网络管理器");
|
||||
|
||||
// 创建无代理客户端
|
||||
let no_proxy_client = ClientBuilder::new()
|
||||
let no_proxy_client = match ClientBuilder::new()
|
||||
.use_rustls_tls()
|
||||
.no_proxy()
|
||||
.pool_max_idle_per_host(POOL_MAX_IDLE_PER_HOST)
|
||||
@@ -74,7 +98,19 @@ impl NetworkManager {
|
||||
.connect_timeout(Duration::from_secs(10))
|
||||
.timeout(Duration::from_secs(30))
|
||||
.build()
|
||||
.expect("Failed to build no_proxy client");
|
||||
{
|
||||
Ok(client) => client,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
true,
|
||||
"Failed to build no_proxy client: {}",
|
||||
e
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut no_proxy_guard = NetworkManager::global().no_proxy_client.lock();
|
||||
*no_proxy_guard = Some(no_proxy_client);
|
||||
@@ -214,7 +250,45 @@ impl NetworkManager {
|
||||
builder = builder.user_agent(version);
|
||||
}
|
||||
|
||||
let client = builder.build().expect("Failed to build custom HTTP client");
|
||||
let client = match builder.build() {
|
||||
Ok(client) => client,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
true,
|
||||
"Failed to build custom HTTP client: {}",
|
||||
e
|
||||
);
|
||||
// Return a simple no-proxy client as fallback
|
||||
match ClientBuilder::new()
|
||||
.use_rustls_tls()
|
||||
.no_proxy()
|
||||
.timeout(DEFAULT_REQUEST_TIMEOUT)
|
||||
.build()
|
||||
{
|
||||
Ok(fallback_client) => fallback_client,
|
||||
Err(fallback_err) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
true,
|
||||
"Failed to create fallback client: {}",
|
||||
fallback_err
|
||||
);
|
||||
self.record_connection_error(&format!(
|
||||
"Critical client build failure: {}",
|
||||
fallback_err
|
||||
));
|
||||
// Return a minimal client that will likely fail but won't panic
|
||||
ClientBuilder::new().build().unwrap_or_else(|_| {
|
||||
// If even the most basic client fails, this is truly critical
|
||||
std::process::exit(1);
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
client.get(url)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user