fix(tauri): reduce Tokio task stack usage for embedded Warp server

This commit is contained in:
Tunglies
2025-12-21 16:14:16 +08:00
parent f9b8a658a1
commit 4a9a4ab6e1

View File

@@ -79,24 +79,11 @@ pub fn embed_server() {
}); });
let pac = warp::path!("commands" / "pac").and_then(|| async move { let pac = warp::path!("commands" / "pac").and_then(|| async move {
let verge_config = Config::verge().await; let pac_content = get_currentt_pac_content().await;
let clash_config = Config::clash().await;
let pac_content = verge_config
.data_arc()
.pac_file_content
.clone()
.unwrap_or_else(|| DEFAULT_PAC.into());
let pac_port = verge_config
.data_arc()
.verge_mixed_port
.unwrap_or_else(|| clash_config.data_arc().get_mixed_port());
let processed_content = pac_content.replace("%mixed-port%", &format!("{pac_port}"));
Ok::<_, warp::Rejection>( Ok::<_, warp::Rejection>(
warp::http::Response::builder() warp::http::Response::builder()
.header("Content-Type", "application/x-ns-proxy-autoconfig") .header("Content-Type", "application/x-ns-proxy-autoconfig")
.body(processed_content) .body(pac_content)
.unwrap_or_default(), .unwrap_or_default(),
) )
}); });
@@ -105,9 +92,7 @@ pub fn embed_server() {
let scheme = warp::path!("commands" / "scheme") let scheme = warp::path!("commands" / "scheme")
.and(warp::query::<QueryParam>()) .and(warp::query::<QueryParam>())
.and_then(|query: QueryParam| async move { .and_then(|query: QueryParam| async move {
AsyncHandler::spawn(|| async move {
logging_error!(Type::Setup, resolve::resolve_scheme(&query.param).await); logging_error!(Type::Setup, resolve::resolve_scheme(&query.param).await);
});
Ok::<_, warp::Rejection>(warp::reply::with_status::<std::string::String>( Ok::<_, warp::Rejection>(warp::reply::with_status::<std::string::String>(
"ok".to_string(), "ok".to_string(),
warp::http::StatusCode::OK, warp::http::StatusCode::OK,
@@ -116,7 +101,28 @@ pub fn embed_server() {
let commands = visible.or(scheme).or(pac); let commands = visible.or(scheme).or(pac);
if let Ok(rt) = tokio::runtime::Builder::new_current_thread()
.thread_name("clash-verge-rev-embed-server")
.worker_threads(1)
.build()
{
rt.spawn(async move {
run(commands, port, shutdown_rx).await;
});
} else {
// Running on Tauri's shared Tokio runtime can block it and allocate many large task stacks,
// so we prefer a dedicated runtime. Keep this branch as a fallback: without it the app
// would fail to start in some environments.
AsyncHandler::spawn(move || async move { AsyncHandler::spawn(move || async move {
run(commands, port, shutdown_rx).await;
});
}
}
async fn run(
commands: impl warp::Filter<Extract = impl warp::Reply> + Clone + Send + Sync + 'static,
port: u16,
shutdown_rx: oneshot::Receiver<()>,
) {
warp::serve(commands) warp::serve(commands)
.bind(([127, 0, 0, 1], port)) .bind(([127, 0, 0, 1], port))
.await .await
@@ -125,7 +131,6 @@ pub fn embed_server() {
}) })
.run() .run()
.await; .await;
});
} }
pub fn shutdown_embedded_server() { pub fn shutdown_embedded_server() {
@@ -136,3 +141,23 @@ pub fn shutdown_embedded_server() {
sender.send(()).ok(); sender.send(()).ok();
} }
} }
/// Build the PAC (proxy auto-config) script to serve, substituting the
/// active mixed port for every `%mixed-port%` placeholder.
///
/// Port precedence: `verge_mixed_port` from the verge config; if unset,
/// fall back to the clash config's mixed port.
///
/// NOTE(review): "currentt" in the name is a typo, kept as-is because the
/// `commands/pac` route handler calls it by this name.
async fn get_currentt_pac_content() -> String {
    // Fetch the verge config ONCE (the previous version awaited
    // `Config::verge()` twice) and copy both fields out inside a narrow
    // scope so the data guard is dropped before any later `.await`.
    let (pac_content, verge_mixed_port) = {
        let verge = Config::verge().await;
        let data = verge.data_arc();
        (
            data.pac_file_content
                .clone()
                .unwrap_or_else(|| DEFAULT_PAC.into()),
            data.verge_mixed_port,
        )
    };
    // Only consult the clash config when verge does not pin a port,
    // avoiding a needless config fetch on the common path.
    let pac_port = match verge_mixed_port {
        Some(port) => port,
        None => Config::clash().await.data_arc().get_mixed_port(),
    };
    pac_content.replace("%mixed-port%", &format!("{pac_port}"))
}