Compare commits

...

17 Commits

Author SHA1 Message Date
Tunglies
6477dd61c3 perf: reduce various timeout and retry intervals for improved responsiveness to fetch proxy information (#6072) 2026-01-25 07:31:34 +00:00
Tunglies
6ded9bdcde doc: changelog 2026-01-25 15:40:58 +08:00
Tunglies
13dc3feb9f perf: migrate fs method to async (#6071)
* perf(profiles): migrate file handling to async and improve error handling

* refactor(profiles): simplify cleanup_orphaned_files and adjust CleanupResult structure
2026-01-25 07:20:12 +00:00
Tunglies
c7462716e5 refactor: reduce duplicated separately useSWR (#6153)
* refactor: reduce duplicated separately useSWR

* refactor: streamline useSWR integration and improve error handling
2026-01-25 07:14:45 +00:00
Tunglies
bf189bb144 perf: improve config processing (#6091)
* perf: improve config processing

* perf: enhance profile reordering logic and adjust logging level

* perf: add PartialEq derive to PrfSelected and PrfExtra structs for improved comparison

* perf: refactor PrfOption merge logic and streamline update_item method in IProfiles

* perf: simplify current_mapping and profiles_preview methods in IProfiles for improved readability

* perf: optimize filename matching logic in IProfiles by using a static regex
2026-01-25 07:13:38 +00:00
Tunglies
0c6631ebb0 fix(ip-info-card): handle offline state and clashConfig absence in IP info fetching (#6085)
* fix(ip-info-card): handle offline state and clashConfig absence in IP info fetching

* fix: eslint errors
2026-01-25 07:12:17 +00:00
Sline
93e7ac1bce feat(webdav): cache connection status and adjust auto-refresh behavior (#6129) 2026-01-25 06:49:12 +00:00
Sline
b921098182 refactor(connections): switch manager table to TanStack column accessors and IConnectionsItem rows (#6083)
* refactor(connection-table): drive column order/visibility/sorting by TanStack Table state

* refactor(connection-table): simplify table data flow and align with built-in API

* refactor(connection-table): let column manager consume TanStack Table columns directly
2026-01-25 06:49:10 +00:00
Sline
440f95f617 feat(misc-viewer): optional delay check interval (#6145)
Co-authored-by: Tunglies <tunglies.dev@outlook.com>
2026-01-25 06:48:16 +00:00
Tunglies
b9667ad349 chore: bump version to 2.4.6 2026-01-25 14:22:22 +08:00
Tunglies
4e7cdbfcc0 Release: 2.4.5 2026-01-25 14:05:57 +08:00
Tunglies
966fd68087 fix(unix): update clash_verge_service_ipc to 2.1.1 to fix directory permissions 2026-01-25 13:35:18 +08:00
Tunglies
334cec3bde fix: update tauri-plugin-mihomo version, improve error handling #6149 2026-01-24 09:19:52 +08:00
Tunglies
6e16133393 ci(Mergify): configuration update (#6152)
Signed-off-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2026-01-23 14:35:57 +00:00
Tunglies
5e976c2fe1 chore: inline crate clash-verge-types to module for better maintenance (#6142) 2026-01-23 14:00:51 +00:00
DikozImpact
d81aa5f233 Ru language fix (#6143)
* Ru language fix

* Update proxies.json

* Update home.json
2026-01-23 07:42:31 +08:00
Tunglies
e5fc0de39a ci: downgrade Ubuntu version in autobuild workflow 2026-01-22 22:08:19 +08:00
60 changed files with 925 additions and 814 deletions

View File

@@ -253,10 +253,12 @@ jobs:
fail-fast: false
matrix:
include:
- os: ubuntu-24.04
# It should be ubuntu-22.04 to match the cross-compilation environment
# otherwise it is hard to resolve the dependencies
- os: ubuntu-22.04
target: aarch64-unknown-linux-gnu
arch: arm64
- os: ubuntu-24.04
- os: ubuntu-22.04
target: armv7-unknown-linux-gnueabihf
arch: armhf
runs-on: ${{ matrix.os }}
@@ -311,39 +313,35 @@ jobs:
- name: Release ${{ env.TAG_CHANNEL }} Version
run: pnpm release-version autobuild-latest
- name: Setup for linux
run: |
sudo dpkg --add-architecture ${{ matrix.arch }}
- name: "Setup for linux"
run: |-
sudo ls -lR /etc/apt/
sudo rm -f /etc/apt/sources.list.d/ubuntu.sources
sudo tee /etc/apt/sources.list << EOF
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu noble main restricted universe multiverse
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu noble-security main restricted universe multiverse
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu noble-updates main restricted universe multiverse
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu noble-backports main restricted universe multiverse
cat > /tmp/sources.list << EOF
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-security main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-updates main multiverse universe restricted
deb [arch=amd64,i386] http://archive.ubuntu.com/ubuntu jammy-backports main multiverse universe restricted
deb [arch=${{ matrix.arch }}] http://ports.ubuntu.com/ubuntu-ports noble main restricted universe multiverse
deb [arch=${{ matrix.arch }}] http://ports.ubuntu.com/ubuntu-ports noble-security main restricted universe multiverse
deb [arch=${{ matrix.arch }}] http://ports.ubuntu.com/ubuntu-ports noble-updates main restricted universe multiverse
deb [arch=${{ matrix.arch }}] http://ports.ubuntu.com/ubuntu-ports noble-backports main restricted universe multiverse
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-security main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-updates main multiverse universe restricted
deb [arch=armhf,arm64] http://ports.ubuntu.com/ubuntu-ports jammy-backports main multiverse universe restricted
EOF
sudo apt-get update -y
sudo mv /etc/apt/sources.list /etc/apt/sources.list.default
sudo mv /tmp/sources.list /etc/apt/sources.list
sudo apt-get install -y libglib2.0-dev-bin
sudo dpkg --add-architecture ${{ matrix.arch }}
sudo apt update
sudo apt-get install -y \
linux-libc-dev:${{ matrix.arch }} \
libc6-dev:${{ matrix.arch }} \
libicu-dev:${{ matrix.arch }}
sudo apt-get install -y --no-install-recommends \
libxslt1-dev:${{ matrix.arch }} \
sudo apt install -y \
libxslt1.1:${{ matrix.arch }} \
libwebkit2gtk-4.1-dev:${{ matrix.arch }} \
libayatana-appindicator3-dev:${{ matrix.arch }} \
libssl-dev:${{ matrix.arch }} \
librsvg2-dev:${{ matrix.arch }} \
patchelf
patchelf:${{ matrix.arch }} \
librsvg2-dev:${{ matrix.arch }}
- name: Install aarch64 tools
if: matrix.target == 'aarch64-unknown-linux-gnu'

5
.mergify.yml Normal file
View File

@@ -0,0 +1,5 @@
queue_rules:
- name: LetMeMergeForYou
batch_size: 3
allow_queue_branch_edit: true
queue_conditions: []

61
Cargo.lock generated
View File

@@ -156,7 +156,7 @@ dependencies = [
"objc2-foundation",
"parking_lot",
"percent-encoding",
"windows-sys 0.60.2",
"windows-sys 0.59.0",
"wl-clipboard-rs",
"x11rb",
]
@@ -1109,7 +1109,7 @@ checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32"
[[package]]
name = "clash-verge"
version = "2.4.5-rc.2"
version = "2.4.6"
dependencies = [
"aes-gcm",
"anyhow",
@@ -1124,7 +1124,6 @@ dependencies = [
"clash-verge-i18n",
"clash-verge-logging",
"clash-verge-signal",
"clash-verge-types",
"clash_verge_logger",
"clash_verge_service_ipc",
"compact_str",
@@ -1219,15 +1218,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "clash-verge-types"
version = "0.1.0"
dependencies = [
"serde",
"serde_yaml_ng",
"smartstring",
]
[[package]]
name = "clash_verge_logger"
version = "0.2.2"
@@ -1243,8 +1233,8 @@ dependencies = [
[[package]]
name = "clash_verge_service_ipc"
version = "2.1.0"
source = "git+https://github.com/clash-verge-rev/clash-verge-service-ipc#dab73f0f6c69fb72408f84beebf4d042137950c8"
version = "2.1.1"
source = "git+https://github.com/clash-verge-rev/clash-verge-service-ipc#d9a3b701008a0b55ab21aaa8e5a60ce140dc90b9"
dependencies = [
"anyhow",
"compact_str",
@@ -2022,7 +2012,7 @@ dependencies = [
"libc",
"option-ext",
"redox_users 0.5.2",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -2286,7 +2276,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -3442,12 +3432,12 @@ dependencies = [
"libc",
"percent-encoding",
"pin-project-lite",
"socket2 0.6.1",
"socket2 0.5.10",
"system-configuration",
"tokio",
"tower-service",
"tracing",
"windows-registry 0.6.1",
"windows-registry",
]
[[package]]
@@ -3775,7 +3765,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
dependencies = [
"hermit-abi 0.5.2",
"libc",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -4507,7 +4497,7 @@ version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -4911,7 +4901,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d8fae84b431384b68627d0f9b3b1245fcf9f46f6c0e3dc902e9dce64edd1967"
dependencies = [
"libc",
"windows-sys 0.61.2",
"windows-sys 0.48.0",
]
[[package]]
@@ -5720,7 +5710,7 @@ dependencies = [
"quinn-udp",
"rustc-hash",
"rustls",
"socket2 0.6.1",
"socket2 0.5.10",
"thiserror 2.0.18",
"tokio",
"tracing",
@@ -5758,9 +5748,9 @@ dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2 0.6.1",
"socket2 0.5.10",
"tracing",
"windows-sys 0.60.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -6380,7 +6370,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.11.0",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -6438,7 +6428,7 @@ dependencies = [
"security-framework",
"security-framework-sys",
"webpki-root-certs",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -7597,7 +7587,7 @@ dependencies = [
"thiserror 2.0.18",
"tracing",
"url",
"windows-registry 0.5.3",
"windows-registry",
"windows-result 0.3.4",
]
@@ -7710,7 +7700,7 @@ dependencies = [
[[package]]
name = "tauri-plugin-mihomo"
version = "0.1.3"
source = "git+https://github.com/clash-verge-rev/tauri-plugin-mihomo#65500f248533c0700a65f0f081e4bcadda4bff35"
source = "git+https://github.com/clash-verge-rev/tauri-plugin-mihomo#322d9f965b5daeb01bc9b16c73d199bb524c7a98"
dependencies = [
"base64 0.22.1",
"futures-util",
@@ -7953,7 +7943,7 @@ dependencies = [
"getrandom 0.3.4",
"once_cell",
"rustix 1.1.3",
"windows-sys 0.61.2",
"windows-sys 0.59.0",
]
[[package]]
@@ -9337,7 +9327,7 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
"windows-sys 0.61.2",
"windows-sys 0.48.0",
]
[[package]]
@@ -9517,17 +9507,6 @@ dependencies = [
"windows-strings 0.4.2",
]
[[package]]
name = "windows-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720"
dependencies = [
"windows-link 0.2.1",
"windows-result 0.4.1",
"windows-strings 0.5.1",
]
[[package]]
name = "windows-result"
version = "0.3.4"

View File

@@ -5,7 +5,6 @@ members = [
"crates/clash-verge-logging",
"crates/clash-verge-signal",
"crates/tauri-plugin-clash-verge-sysinfo",
"crates/clash-verge-types",
"crates/clash-verge-i18n",
]
resolver = "2"
@@ -44,7 +43,6 @@ strip = false
clash-verge-draft = { path = "crates/clash-verge-draft" }
clash-verge-logging = { path = "crates/clash-verge-logging" }
clash-verge-signal = { path = "crates/clash-verge-signal" }
clash-verge-types = { path = "crates/clash-verge-types" }
clash-verge-i18n = { path = "crates/clash-verge-i18n" }
tauri-plugin-clash-verge-sysinfo = { path = "crates/tauri-plugin-clash-verge-sysinfo" }

View File

@@ -1,53 +1,22 @@
## v2.4.5
- **Upgrade the Mihomo (Meta) core to v1.19.19**
## v2.4.6
### 🐞 Bug Fixes
- Fix DNS hijacking failure on wired networks under macOS
- Fix context menu display issues inside the Monaco editor
- Fix the port-occupancy check when setting proxy ports
- Fix the Monaco editor getting stuck on Loading during initialization
- Fix fields in `config.yaml` / `profiles.yaml` not being restored correctly when restoring a backup
- Fix system theme synchronization on Windows
- Fix URL Schemes failing to import
- Fix TUN service installation failure on Linux
- Fix possible false positives when reporting ports as occupied
- Fix changes to the allowed external controller origins not taking effect immediately
- Fix a frontend performance regression
- Fix slow proxy information refresh on first launch
- Fix endless IP geolocation requests when there is no network
- Fix the retry logic on the WebDAV page
<details>
<summary><strong> ✨ New Features </strong></summary>
- Allow advanced filter search on the proxies page
- Add an import-backup button to the backup settings page
- Allow changing the notification popup position
- Support collapsing the navigation bar (navigation bar context menu / interface settings)
- Allow showing the outbound mode in the top-level tray menu
- Allow disabling the display of proxy groups in the tray
- Support importing AnyTLS URI configurations directly in "Edit Node"
- Support disabling "Validate proxy bypass format"
- Add visual editors for the system proxy bypass list and TUN excluded custom subnets
- Support setting the automatic delay-check interval for subscriptions
</details>
<details>
<summary><strong> 🚀 Improvements </strong></summary>
- The in-app changelog supports parsing and rendering HTML tags
- Reduce frontend and backend resource usage when rendering the traffic graph
- Try disabling WebKit DMABUF rendering on Linux with NVIDIA GPUs to avoid potential issues
- Implement autostart on Windows via a scheduled task
- Improve the rate limiting for tray and window operations
- When adding a node via "Edit Node", automatically place it first in the first `select`-type proxy group
- Hide scrollbars on the side navigation bar and the floating jump navigation
- Improve GUI support for AnyTLS / Mieru / Sudoku
- Further restrict service IPC permissions on macOS and Linux
- Remove the redundant 3-second delay from the Windows autostart scheduled task
- Right-click an error notification to copy the error details
- Optimize the save flow for TUN settings to avoid UI stalls
- Add the `libayatana-appindicator` dependency for `deb` / `rpm` packages
- Extend the sort click area of the "Connections" table headers to the full column width
- Show a loading overlay during backup restore; the dialog no longer needs to be closed manually
- Backend performance optimizations
- Frontend performance optimizations
</details>

View File

@@ -1,13 +0,0 @@
[package]
name = "clash-verge-types"
version = "0.1.0"
edition = "2024"
rust-version = "1.91"
[dependencies]
serde = { workspace = true }
serde_yaml_ng = { workspace = true }
smartstring = { workspace = true }
[lints]
workspace = true

View File

@@ -1 +0,0 @@
pub mod runtime;

View File

@@ -1,3 +1,57 @@
## v2.4.5
- **Upgrade the Mihomo (Meta) core to v1.19.19**
### 🐞 Bug Fixes
- Fix DNS hijacking failure on wired networks under macOS
- Fix context menu display issues inside the Monaco editor
- Fix the port-occupancy check when setting proxy ports
- Fix the Monaco editor getting stuck on Loading during initialization
- Fix fields in `config.yaml` / `profiles.yaml` not being restored correctly when restoring a backup
- Fix system theme synchronization on Windows
- Fix URL Schemes failing to import
- Fix TUN service installation failure on Linux
- Fix possible false positives when reporting ports as occupied
- Fix changes to the allowed external controller origins not taking effect immediately
- Fix a frontend performance regression
<details>
<summary><strong> ✨ New Features </strong></summary>
- Allow advanced filter search on the proxies page
- Add an import-backup button to the backup settings page
- Allow changing the notification popup position
- Support collapsing the navigation bar (navigation bar context menu / interface settings)
- Allow showing the outbound mode in the top-level tray menu
- Allow disabling the display of proxy groups in the tray
- Support importing AnyTLS URI configurations directly in "Edit Node"
- Support disabling "Validate proxy bypass format"
- Add visual editors for the system proxy bypass list and TUN excluded custom subnets
</details>
<details>
<summary><strong> 🚀 Improvements </strong></summary>
- The in-app changelog supports parsing and rendering HTML tags
- Reduce frontend and backend resource usage when rendering the traffic graph
- Try disabling WebKit DMABUF rendering on Linux with NVIDIA GPUs to avoid potential issues
- Implement autostart on Windows via a scheduled task
- Improve the rate limiting for tray and window operations
- When adding a node via "Edit Node", automatically place it first in the first `select`-type proxy group
- Hide scrollbars on the side navigation bar and the floating jump navigation
- Improve GUI support for AnyTLS / Mieru / Sudoku
- Further restrict service IPC permissions on macOS and Linux
- Remove the redundant 3-second delay from the Windows autostart scheduled task
- Right-click an error notification to copy the error details
- Optimize the save flow for TUN settings to avoid UI stalls
- Add the `libayatana-appindicator` dependency for `deb` / `rpm` packages
- Extend the sort click area of the "Connections" table headers to the full column width
- Show a loading overlay during backup restore; the dialog no longer needs to be closed manually
</details>
## v2.4.4
- **Upgrade the Mihomo (Meta) core to v1.19.17**

View File

@@ -1,6 +1,6 @@
{
"name": "clash-verge",
"version": "2.4.5-rc.2",
"version": "2.4.6",
"license": "GPL-3.0-only",
"scripts": {
"prepare": "husky || true",

View File

@@ -1,6 +1,6 @@
[package]
name = "clash-verge"
version = "2.4.5-rc.2"
version = "2.4.6"
description = "clash verge"
authors = ["zzzgydi", "Tunglies", "wonfen", "MystiPanda"]
license = "GPL-3.0-only"
@@ -34,7 +34,6 @@ tauri-build = { version = "2.5.3", features = [] }
clash-verge-draft = { workspace = true }
clash-verge-logging = { workspace = true }
clash-verge-signal = { workspace = true }
clash-verge-types = { workspace = true }
clash-verge-i18n = { workspace = true }
tauri-plugin-clash-verge-sysinfo = { workspace = true }
tauri-plugin-clipboard-manager = { workspace = true }
@@ -99,7 +98,7 @@ tauri-plugin-devtools = { version = "2.0.1" }
tauri-plugin-mihomo = { git = "https://github.com/clash-verge-rev/tauri-plugin-mihomo" }
clash_verge_logger = { git = "https://github.com/clash-verge-rev/clash-verge-logger" }
async-trait = "0.1.89"
clash_verge_service_ipc = { version = "2.1.0", features = [
clash_verge_service_ipc = { version = "2.1.1", features = [
"client",
], git = "https://github.com/clash-verge-rev/clash-verge-service-ipc" }
arc-swap = "1.8.0"

View File

@@ -117,7 +117,7 @@ pub async fn import_profile(url: std::string::String, option: Option<PrfOption>)
pub async fn reorder_profile(active_id: String, over_id: String) -> CmdResult {
match profiles_reorder_safe(&active_id, &over_id).await {
Ok(_) => {
logging!(info, Type::Cmd, "重新排序配置文件");
logging!(debug, Type::Cmd, "重新排序配置文件");
Config::profiles().await.apply();
Ok(())
}

View File

@@ -1,6 +1,6 @@
use super::{IClashTemp, IProfiles, IVerge};
use crate::{
config::{PrfItem, profiles_append_item_safe},
config::{PrfItem, profiles_append_item_safe, runtime::IRuntime},
constants::{files, timing},
core::{
CoreManager,
@@ -16,7 +16,6 @@ use anyhow::{Result, anyhow};
use backoff::{Error as BackoffError, ExponentialBackoff};
use clash_verge_draft::Draft;
use clash_verge_logging::{Type, logging, logging_error};
use clash_verge_types::runtime::IRuntime;
use smartstring::alias::String;
use std::path::PathBuf;
use tauri_plugin_clash_verge_sysinfo::is_current_app_handle_admin;

View File

@@ -4,6 +4,7 @@ mod config;
mod encrypt;
mod prfitem;
pub mod profiles;
pub mod runtime;
mod verge;
pub use self::{clash::*, config::*, encrypt::*, prfitem::*, profiles::*, verge::*};

View File

@@ -60,13 +60,13 @@ pub struct PrfItem {
pub file_data: Option<String>,
}
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
#[derive(Default, Debug, Clone, PartialEq, Deserialize, Serialize)]
pub struct PrfSelected {
pub name: Option<String>,
pub now: Option<String>,
}
#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
#[derive(Default, Debug, Clone, Copy, PartialEq, Deserialize, Serialize)]
pub struct PrfExtra {
pub upload: u64,
pub download: u64,
@@ -124,25 +124,22 @@ pub struct PrfOption {
impl PrfOption {
pub fn merge(one: Option<&Self>, other: Option<&Self>) -> Option<Self> {
match (one, other) {
(Some(a_ref), Some(b_ref)) => {
let mut result = a_ref.clone();
result.user_agent = b_ref.user_agent.clone().or(result.user_agent);
result.with_proxy = b_ref.with_proxy.or(result.with_proxy);
result.self_proxy = b_ref.self_proxy.or(result.self_proxy);
result.danger_accept_invalid_certs =
b_ref.danger_accept_invalid_certs.or(result.danger_accept_invalid_certs);
result.allow_auto_update = b_ref.allow_auto_update.or(result.allow_auto_update);
result.update_interval = b_ref.update_interval.or(result.update_interval);
result.merge = b_ref.merge.clone().or(result.merge);
result.script = b_ref.script.clone().or(result.script);
result.rules = b_ref.rules.clone().or(result.rules);
result.proxies = b_ref.proxies.clone().or(result.proxies);
result.groups = b_ref.groups.clone().or(result.groups);
result.timeout_seconds = b_ref.timeout_seconds.or(result.timeout_seconds);
Some(result)
}
(Some(a_ref), None) => Some(a_ref.clone()),
(None, Some(b_ref)) => Some(b_ref.clone()),
(Some(a), Some(b)) => Some(Self {
user_agent: b.user_agent.as_ref().or(a.user_agent.as_ref()).cloned(),
with_proxy: b.with_proxy.or(a.with_proxy),
self_proxy: b.self_proxy.or(a.self_proxy),
danger_accept_invalid_certs: b.danger_accept_invalid_certs.or(a.danger_accept_invalid_certs),
allow_auto_update: b.allow_auto_update.or(a.allow_auto_update),
update_interval: b.update_interval.or(a.update_interval),
merge: b.merge.as_ref().or(a.merge.as_ref()).cloned(),
script: b.script.as_ref().or(a.script.as_ref()).cloned(),
rules: b.rules.as_ref().or(a.rules.as_ref()).cloned(),
proxies: b.proxies.as_ref().or(a.proxies.as_ref()).cloned(),
groups: b.groups.as_ref().or(a.groups.as_ref()).cloned(),
timeout_seconds: b.timeout_seconds.or(a.timeout_seconds),
}),
(Some(a), None) => Some(a.clone()),
(None, Some(b)) => Some(b.clone()),
(None, None) => None,
}
}
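The reworked merge builds its result in a single struct literal, letting every field of `other` override `one` whenever it carries a value. A minimal sketch of the same field-wise Option merge on a hypothetical two-field type (names are illustrative, not from the codebase):

#[derive(Clone, Debug, PartialEq)]
struct Opt {
    user_agent: Option<String>,
    with_proxy: Option<bool>,
}

impl Opt {
    // `other` wins whenever it carries a value; otherwise keep `base`.
    fn merge(base: Option<&Self>, other: Option<&Self>) -> Option<Self> {
        match (base, other) {
            (Some(a), Some(b)) => Some(Self {
                user_agent: b.user_agent.as_ref().or(a.user_agent.as_ref()).cloned(),
                with_proxy: b.with_proxy.or(a.with_proxy),
            }),
            (Some(a), None) => Some(a.clone()),
            (None, Some(b)) => Some(b.clone()),
            (None, None) => None,
        }
    }
}

fn main() {
    let a = Opt { user_agent: Some("ua-a".into()), with_proxy: Some(true) };
    let b = Opt { user_agent: None, with_proxy: Some(false) };
    // user_agent falls back to `a`; with_proxy is overridden by `b`.
    let merged = Opt::merge(Some(&a), Some(&b));
    assert_eq!(merged, Some(Opt { user_agent: Some("ua-a".into()), with_proxy: Some(false) }));
}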

View File

@@ -5,12 +5,16 @@ use crate::utils::{
};
use anyhow::{Context as _, Result, bail};
use clash_verge_logging::{Type, logging};
use once_cell::sync::OnceCell;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_yaml_ng::Mapping;
use smartstring::alias::String;
use std::collections::HashSet;
use std::collections::{HashMap, HashSet};
use tokio::fs;
static PROFILE_FILE_RE: OnceCell<Regex> = OnceCell::new();
/// Define the `profiles.yaml` schema
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
pub struct IProfiles {
@@ -31,29 +35,21 @@ pub struct IProfilePreview<'a> {
#[derive(Debug, Clone)]
pub struct CleanupResult {
pub total_files: usize,
pub deleted_files: Vec<String>,
pub failed_deletions: Vec<String>,
pub deleted_files: usize,
pub failed_deletions: usize,
}
macro_rules! patch {
($lv: expr, $rv: expr, $key: tt) => {
if ($rv.$key).is_some() {
$lv.$key = $rv.$key.to_owned();
if let Some(ref val) = $rv.$key {
if Some(val) != $lv.$key.as_ref() {
$lv.$key = Some(val.to_owned());
}
}
};
}
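The revised patch! arm only writes a field when the incoming value exists and differs from the current one, avoiding redundant clones of unchanged fields. Roughly what a single invocation expands to, shown on an illustrative struct rather than the project's types:

#[derive(Default, Debug, PartialEq)]
struct Item {
    name: Option<String>,
}

fn main() {
    let mut current = Item { name: Some("profile-a".into()) };
    let incoming = Item { name: Some("profile-a".into()) };

    // Approximate expansion of `patch!(current, incoming, name)`:
    if let Some(ref val) = incoming.name {
        if Some(val) != current.name.as_ref() {
            current.name = Some(val.to_owned());
        }
    }

    // Equal values are left untouched; a differing Some(..) would overwrite.
    assert_eq!(current.name.as_deref(), Some("profile-a"));
}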
impl IProfiles {
// Helper to find and remove an item by uid from the items vec, returning its file name (if any).
fn take_item_file_by_uid(items: &mut Vec<PrfItem>, target_uid: Option<String>) -> Option<String> {
for (i, _) in items.iter().enumerate() {
if items[i].uid == target_uid {
return items.remove(i).file;
}
}
None
}
pub async fn new() -> Self {
let path = match dirs::profiles_path() {
Ok(p) => p,
@@ -63,21 +59,22 @@ impl IProfiles {
}
};
match help::read_yaml::<Self>(&path).await {
Ok(mut profiles) => {
let items = profiles.items.get_or_insert_with(Vec::new);
for item in items.iter_mut() {
if item.uid.is_none() {
item.uid = Some(help::get_uid("d").into());
}
}
profiles
}
let mut profiles = match help::read_yaml::<Self>(&path).await {
Ok(profiles) => profiles,
Err(err) => {
logging!(error, Type::Config, "{err}");
Self::default()
return Self::default();
}
};
let items = profiles.items.get_or_insert_with(Vec::new);
for item in items.iter_mut() {
if item.uid.is_none() {
item.uid = Some(help::get_uid("d").into());
}
}
profiles
}
pub async fn save_file(&self) -> Result<()> {
@@ -113,38 +110,28 @@ impl IProfiles {
pub fn get_item(&self, uid: impl AsRef<str>) -> Result<&PrfItem> {
let uid_str = uid.as_ref();
if let Some(items) = self.items.as_ref() {
for each in items.iter() {
if let Some(uid_val) = &each.uid
&& uid_val.as_str() == uid_str
{
return Ok(each);
}
}
}
bail!("failed to get the profile item \"uid:{}\"", uid_str);
self.items
.as_ref()
.ok_or_else(|| anyhow::anyhow!("no profile items found"))?
.iter()
.find(|each| each.uid.as_ref().is_some_and(|uid_val| uid_val.as_str() == uid_str))
.ok_or_else(|| anyhow::anyhow!("failed to get the profile item \"uid:{}\"", uid_str))
}
    /// append a new item
    /// if the file_data is Some
    /// then the data should be saved to the file
pub async fn append_item(&mut self, item: &mut PrfItem) -> Result<()> {
let uid = &item.uid;
if uid.is_none() {
bail!("the uid should not be null");
}
anyhow::ensure!(item.uid.is_some(), "the uid should not be null");
// save the file data
// move the field value after save
if let Some(file_data) = item.file_data.take() {
if item.file.is_none() {
bail!("the file should not be null");
}
anyhow::ensure!(item.file.is_some(), "the file should not be null");
let file = item
.file
.clone()
.as_ref()
.ok_or_else(|| anyhow::anyhow!("file field is required when file_data is provided"))?;
let path = dirs::app_profiles_dir()?.join(file.as_str());
@@ -153,111 +140,116 @@ impl IProfiles {
.with_context(|| format!("failed to write to file \"{file}\""))?;
}
if self.current.is_none() && (item.itype == Some("remote".into()) || item.itype == Some("local".into())) {
self.current = uid.to_owned();
if self.current.is_none()
&& let Some(t) = item.itype.as_deref()
&& (t == "remote" || t == "local")
{
self.current = item.uid.to_owned();
}
if self.items.is_none() {
self.items = Some(vec![]);
}
if let Some(items) = self.items.as_mut() {
items.push(item.to_owned());
}
self.items.get_or_insert_default().push(std::mem::take(item));
Ok(())
}
/// reorder items
pub async fn reorder(&mut self, active_id: &String, over_id: &String) -> Result<()> {
let mut items = self.items.take().unwrap_or_default();
let mut old_index = None;
let mut new_index = None;
pub async fn reorder(&mut self, active_id: &str, over_id: &str) -> Result<()> {
if active_id == over_id {
return Ok(());
}
for (i, _) in items.iter().enumerate() {
if items[i].uid.as_ref() == Some(active_id) {
old_index = Some(i);
let Some(items) = self.items.as_mut() else {
return Ok(());
};
let mut old_idx = None;
let mut new_idx = None;
for (i, item) in items.iter().enumerate() {
if let Some(uid) = item.uid.as_ref() {
if uid == active_id {
old_idx = Some(i);
}
if uid == over_id {
new_idx = Some(i);
}
}
if items[i].uid.as_ref() == Some(over_id) {
new_index = Some(i);
if old_idx.is_some() && new_idx.is_some() {
break;
}
}
let (old_idx, new_idx) = match (old_index, new_index) {
(Some(old), Some(new)) => (old, new),
_ => return Ok(()),
};
let item = items.remove(old_idx);
items.insert(new_idx, item);
self.items = Some(items);
self.save_file().await
if let (Some(old), Some(new)) = (old_idx, new_idx) {
if old < new {
items[old..=new].rotate_left(1);
} else {
items[new..=old].rotate_right(1);
}
return self.save_file().await;
}
Ok(())
}
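The new reorder drops the remove-and-insert pair in favour of a single slice rotation, which moves the dragged item into place while shifting everything between the two indices by one slot. A standalone sketch of that rotation on a plain Vec (purely illustrative):

// Move the element at `old` so it ends up at index `new`.
fn move_item<T>(items: &mut [T], old: usize, new: usize) {
    if old == new {
        return;
    }
    if old < new {
        items[old..=new].rotate_left(1);
    } else {
        items[new..=old].rotate_right(1);
    }
}

fn main() {
    let mut v = vec!["a", "b", "c", "d", "e"];
    move_item(&mut v, 1, 3); // drag "b" to where "d" was
    assert_eq!(v, vec!["a", "c", "d", "b", "e"]);
    move_item(&mut v, 3, 0); // drag "b" back to the front
    assert_eq!(v, vec!["b", "a", "c", "d", "e"]);
}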
/// update the item value
pub async fn patch_item(&mut self, uid: &String, item: &PrfItem) -> Result<()> {
let mut items = self.items.take().unwrap_or_default();
let items = self
.items
.as_mut()
.ok_or_else(|| anyhow::anyhow!("no profile items found"))?;
for each in items.iter_mut() {
if each.uid.as_ref() == Some(uid) {
patch!(each, item, itype);
patch!(each, item, name);
patch!(each, item, desc);
patch!(each, item, file);
patch!(each, item, url);
patch!(each, item, selected);
patch!(each, item, extra);
patch!(each, item, updated);
patch!(each, item, option);
let target = items.iter_mut().find(|each| each.uid.as_ref() == Some(uid));
self.items = Some(items);
return self.save_file().await;
}
if let Some(each) = target {
patch!(each, item, itype);
patch!(each, item, name);
patch!(each, item, desc);
patch!(each, item, file);
patch!(each, item, url);
patch!(each, item, selected);
patch!(each, item, extra);
patch!(each, item, updated);
patch!(each, item, option);
return self.save_file().await;
}
self.items = Some(items);
bail!("failed to find the profile item \"uid:{uid}\"")
}
    /// used to update the remote item
    /// only patches `updated`, `extra`, `file_data`
pub async fn update_item(&mut self, uid: &String, item: &mut PrfItem) -> Result<()> {
if self.items.is_none() {
self.items = Some(vec![]);
}
let target = self
.items
.get_or_insert_default()
.iter_mut()
.find(|each| each.uid.as_ref() == Some(uid))
.ok_or_else(|| anyhow::anyhow!("Item not found"))?;
// find the item
let _ = self.get_item(uid)?;
target.extra = item.extra;
target.updated = item.updated;
target.home = std::mem::take(&mut item.home);
target.option = PrfOption::merge(target.option.as_ref(), item.option.as_ref());
if let Some(items) = self.items.as_mut() {
let some_uid = Some(uid.clone());
let Some(file_data) = item.file_data.take() else {
return self.save_file().await;
};
for each in items.iter_mut() {
if each.uid == some_uid {
each.extra = item.extra;
each.updated = item.updated;
each.home = item.home.to_owned();
each.option = PrfOption::merge(each.option.as_ref(), item.option.as_ref());
// save the file data
// move the field value after save
if let Some(file_data) = item.file_data.take() {
let file = each.file.take();
let file =
file.unwrap_or_else(|| item.file.take().unwrap_or_else(|| format!("{}.yaml", &uid).into()));
let file = target
.file
.take()
.or_else(|| item.file.take())
.unwrap_or_else(|| format!("{}.yaml", uid).into());
                // the file must exist
each.file = Some(file.clone());
let path = dirs::app_profiles_dir()?.join(file.as_str());
let path = dirs::app_profiles_dir()?.join(file.as_str());
fs::write(&path, file_data.as_bytes())
.await
.with_context(|| format!("failed to write to file \"{file}\""))?;
fs::write(&path, file_data.as_bytes())
.await
.with_context(|| format!("failed to write to file \"{file}\""))?;
}
break;
}
}
}
target.file = Some(file);
self.save_file().await
}
@@ -265,68 +257,82 @@ impl IProfiles {
/// delete item
/// if delete the current then return true
pub async fn delete_item(&mut self, uid: &String) -> Result<bool> {
let current = self.current.as_ref().unwrap_or(uid);
let current = current.clone();
let item = self.get_item(uid)?;
let merge_uid = item.option.as_ref().and_then(|e| e.merge.clone());
let script_uid = item.option.as_ref().and_then(|e| e.script.clone());
let rules_uid = item.option.as_ref().and_then(|e| e.rules.clone());
let proxies_uid = item.option.as_ref().and_then(|e| e.proxies.clone());
let groups_uid = item.option.as_ref().and_then(|e| e.groups.clone());
let mut items = self.items.take().unwrap_or_default();
let uids_to_remove: HashSet<String> = {
let item = self.get_item(uid)?;
let mut set = HashSet::new();
set.insert(uid.clone());
// remove the main item (if exists) and delete its file
if let Some(file) = Self::take_item_file_by_uid(&mut items, Some(uid.clone())) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
// remove related extension items (merge, script, rules, proxies, groups)
if let Some(file) = Self::take_item_file_by_uid(&mut items, merge_uid.clone()) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, script_uid.clone()) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, rules_uid.clone()) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, proxies_uid.clone()) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
if let Some(file) = Self::take_item_file_by_uid(&mut items, groups_uid.clone()) {
let _ = dirs::app_profiles_dir()?.join(file.as_str()).remove_if_exists().await;
}
// delete the original uid
if current == *uid {
self.current = None;
for item in items.iter() {
if item.itype == Some("remote".into()) || item.itype == Some("local".into()) {
self.current = item.uid.clone();
break;
if let Some(opt) = &item.option {
if let Some(u) = &opt.merge {
set.insert(u.clone());
}
if let Some(u) = &opt.script {
set.insert(u.clone());
}
if let Some(u) = &opt.rules {
set.insert(u.clone());
}
if let Some(u) = &opt.proxies {
set.insert(u.clone());
}
if let Some(u) = &opt.groups {
set.insert(u.clone());
}
}
set
};
let mut items = self.items.take().unwrap_or_default();
let mut deleted_files = Vec::new();
items.retain_mut(|item| {
if let Some(item_uid) = item.uid.as_ref()
&& uids_to_remove.contains(item_uid)
{
if let Some(file) = item.file.take() {
deleted_files.push(file);
}
return false;
}
true
});
let is_deleting_current = self.current.as_ref() == Some(uid);
if is_deleting_current {
self.current = items
.iter()
.find(|i| i.itype.as_deref() == Some("remote") || i.itype.as_deref() == Some("local"))
.and_then(|i| i.uid.clone());
}
self.items = Some(items);
if let Ok(profile_dir) = dirs::app_profiles_dir() {
for file in deleted_files {
let _ = profile_dir.join(file.as_str()).remove_if_exists().await;
}
}
self.save_file().await?;
Ok(current == *uid)
Ok(is_deleting_current)
}
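delete_item now gathers the target uid and any related extension uids into a HashSet up front, then removes every matching item in one retain_mut pass while collecting their files for deletion afterwards. A simplified sketch of that remove-and-collect pattern (types and data are illustrative):

use std::collections::HashSet;

struct Item {
    uid: Option<String>,
    file: Option<String>,
}

// Remove every item whose uid is in `uids_to_remove`, returning the files they owned.
fn remove_items(items: &mut Vec<Item>, uids_to_remove: &HashSet<String>) -> Vec<String> {
    let mut deleted_files = Vec::new();
    items.retain_mut(|item| {
        if let Some(uid) = item.uid.as_ref() {
            if uids_to_remove.contains(uid) {
                if let Some(file) = item.file.take() {
                    deleted_files.push(file);
                }
                return false; // drop this item
            }
        }
        true // keep everything else
    });
    deleted_files
}

fn main() {
    let mut items = vec![
        Item { uid: Some("R1".into()), file: Some("R1.yaml".into()) },
        Item { uid: Some("m1".into()), file: Some("m1.yaml".into()) },
        Item { uid: Some("R2".into()), file: Some("R2.yaml".into()) },
    ];
    let to_remove: HashSet<String> = HashSet::from(["R1".to_string(), "m1".to_string()]);
    let files = remove_items(&mut items, &to_remove);
    assert_eq!(files, vec!["R1.yaml".to_string(), "m1.yaml".to_string()]);
    assert_eq!(items.len(), 1);
}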
    /// Get the subscription content that `current` points to
pub async fn current_mapping(&self) -> Result<Mapping> {
match (self.current.as_ref(), self.items.as_ref()) {
(Some(current), Some(items)) => {
if let Some(item) = items.iter().find(|e| e.uid.as_ref() == Some(current)) {
let file_path = match item.file.as_ref() {
Some(file) => dirs::app_profiles_dir()?.join(file.as_str()),
None => bail!("failed to get the file field"),
};
return help::read_mapping(&file_path).await;
}
bail!("failed to find the current profile \"uid:{current}\"");
}
_ => Ok(Mapping::new()),
}
let (Some(current), Some(items)) = (self.current.as_ref(), self.items.as_ref()) else {
return Ok(Mapping::new());
};
let Some(target) = items.iter().find(|e| e.uid.as_ref() == Some(current)) else {
bail!("failed to find the current profile \"uid:{current}\"");
};
let file = target
.file
.as_ref()
.ok_or_else(|| anyhow::anyhow!("failed to get the file field"))?;
let file_path = dirs::app_profiles_dir()?.join(file.as_str());
help::read_mapping(&file_path).await
}
    /// Check whether the profile is the one `current` points to
@@ -336,44 +342,40 @@ impl IProfiles {
    /// Get all profiles (uid, name, whether it is the current one)
pub fn profiles_preview(&self) -> Option<Vec<IProfilePreview<'_>>> {
self.items.as_ref().map(|items| {
items
.iter()
.filter_map(|e| {
if let (Some(uid), Some(name)) = (e.uid.as_ref(), e.name.as_ref()) {
let is_current = self.is_current_profile_index(uid);
let preview = IProfilePreview { uid, name, is_current };
Some(preview)
} else {
None
}
let items = self.items.as_ref()?;
let current_uid = self.current.as_ref();
let previews = items
.iter()
.filter_map(|e| {
let uid = e.uid.as_ref()?;
let name = e.name.as_ref()?;
Some(IProfilePreview {
uid,
name,
is_current: current_uid == Some(uid),
})
.collect()
})
})
.collect();
Some(previews)
}
    /// Get the name by uid
pub fn get_name_by_uid(&self, uid: &String) -> Option<&String> {
if let Some(items) = &self.items {
for item in items {
if item.uid.as_ref() == Some(uid) {
return item.name.as_ref();
}
}
}
None
pub fn get_name_by_uid(&self, uid: &str) -> Option<&String> {
self.items
.as_ref()?
.iter()
.find(|item| item.uid.as_deref() == Some(uid))
.and_then(|item| item.name.as_ref())
}
    /// Using the app's profile list as the source of truth, delete files that are no longer needed
pub async fn cleanup_orphaned_files(&self) -> Result<CleanupResult> {
pub async fn cleanup_orphaned_files(&self) -> Result<()> {
let profiles_dir = dirs::app_profiles_dir()?;
if !profiles_dir.exists() {
return Ok(CleanupResult {
total_files: 0,
deleted_files: vec![],
failed_deletions: vec![],
});
return Ok(());
}
        // Collect the set of file names for all active profiles
@@ -384,11 +386,11 @@ impl IProfiles {
        // Scan all files under the profiles directory
let mut total_files = 0;
let mut deleted_files = vec![];
let mut failed_deletions = vec![];
let mut deleted_files = 0;
let mut failed_deletions = 0;
for entry in std::fs::read_dir(&profiles_dir)? {
let entry = entry?;
let mut dir_entries = tokio::fs::read_dir(&profiles_dir).await?;
while let Some(entry) = dir_entries.next_entry().await? {
let path = entry.path();
if !path.is_file() {
@@ -410,11 +412,11 @@ impl IProfiles {
if !active_files.contains(file_name) {
match path.to_path_buf().remove_if_exists().await {
Ok(_) => {
deleted_files.push(file_name.into());
deleted_files += 1;
logging!(debug, Type::Config, "已清理冗余文件: {file_name}");
}
Err(e) => {
failed_deletions.push(format!("{file_name}: {e}").into());
failed_deletions += 1;
logging!(warn, Type::Config, "Warning: 清理文件失败: {file_name} - {e}");
}
}
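This hunk belongs to the "migrate fs method to async" change: the blocking std::fs::read_dir loop becomes tokio::fs::read_dir driven by next_entry().await, so the directory scan no longer blocks the async runtime. A minimal sketch of that iteration pattern, assuming tokio with its fs feature enabled (function name and filtering are illustrative):

use std::path::Path;

// Collect the file names under `dir` without blocking the executor.
async fn list_files(dir: &Path) -> std::io::Result<Vec<String>> {
    let mut names = Vec::new();
    let mut entries = tokio::fs::read_dir(dir).await?;
    while let Some(entry) = entries.next_entry().await? {
        let path = entry.path();
        if path.is_file() {
            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                names.push(name.to_string());
            }
        }
    }
    Ok(names)
}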
@@ -433,11 +435,11 @@ impl IProfiles {
Type::Config,
"Profile 文件清理完成: 总文件数={}, 删除文件数={}, 失败数={}",
result.total_files,
result.deleted_files.len(),
result.failed_deletions.len()
result.deleted_files,
result.failed_deletions
);
Ok(result)
Ok(())
}
    /// Do not delete global extension configs
@@ -452,59 +454,39 @@ impl IProfiles {
    /// Get the file names associated with all active profiles
fn get_all_active_files(&self) -> HashSet<&str> {
let mut active_files: HashSet<&str> = HashSet::new();
let mut active_files = HashSet::new();
let items = match &self.items {
Some(i) => i,
None => return active_files,
};
if let Some(items) = &self.items {
for item in items {
                // Collect files for every profile type
if let Some(file) = &item.file {
active_files.insert(file);
}
let item_map: HashMap<Option<&str>, &PrfItem> = items.iter().map(|i| (i.uid.as_deref(), i)).collect();
        // For main profile types (remote/local), also collect their associated extension files
if let Some(itype) = &item.itype
&& (itype == "remote" || itype == "local")
&& let Some(option) = &item.option
for item in items {
if let Some(f) = &item.file {
active_files.insert(f.as_str());
}
let Some(opt) = &item.option else {
continue;
};
let related = [
opt.merge.as_deref(),
opt.script.as_deref(),
opt.rules.as_deref(),
opt.proxies.as_deref(),
opt.groups.as_deref(),
];
for r_uid in related.into_iter().flatten() {
if let Some(r_item) = item_map.get(&Some(r_uid))
&& let Some(f) = &r_item.file
{
                    // Collect the associated extension files
if let Some(merge_uid) = &option.merge
&& let Ok(merge_item) = self.get_item(merge_uid)
&& let Some(file) = &merge_item.file
{
active_files.insert(file);
}
if let Some(script_uid) = &option.script
&& let Ok(script_item) = self.get_item(script_uid)
&& let Some(file) = &script_item.file
{
active_files.insert(file);
}
if let Some(rules_uid) = &option.rules
&& let Ok(rules_item) = self.get_item(rules_uid)
&& let Some(file) = &rules_item.file
{
active_files.insert(file);
}
if let Some(proxies_uid) = &option.proxies
&& let Ok(proxies_item) = self.get_item(proxies_uid)
&& let Some(file) = &proxies_item.file
{
active_files.insert(file);
}
if let Some(groups_uid) = &option.groups
&& let Ok(groups_item) = self.get_item(groups_uid)
&& let Some(file) = &groups_item.file
{
active_files.insert(file);
}
active_files.insert(f.as_str());
}
}
}
active_files
}
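get_all_active_files previously resolved each related uid through get_item, a linear scan per lookup; it now builds a HashMap index over the items once so each related-uid lookup is constant time. A compact sketch of that indexing idea (the `related` field stands in for the merge/script/rules/proxies/groups uids and is illustrative):

use std::collections::{HashMap, HashSet};

struct Item {
    uid: Option<String>,
    file: Option<String>,
    related: Vec<String>,
}

fn active_files(items: &[Item]) -> HashSet<&str> {
    // One pass to build a uid -> item index instead of rescanning per lookup.
    let index: HashMap<&str, &Item> = items
        .iter()
        .filter_map(|i| i.uid.as_deref().map(|uid| (uid, i)))
        .collect();

    let mut files = HashSet::new();
    for item in items {
        if let Some(f) = &item.file {
            files.insert(f.as_str());
        }
        for uid in &item.related {
            if let Some(f) = index.get(uid.as_str()).and_then(|i| i.file.as_ref()) {
                files.insert(f.as_str());
            }
        }
    }
    files
}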
@@ -519,18 +501,10 @@ impl IProfiles {
// p12345678.yaml (proxies)
// g12345678.yaml (groups)
let patterns = [
r"^[RL][a-zA-Z0-9]+\.yaml$", // Remote/Local profiles
r"^m[a-zA-Z0-9]+\.yaml$", // Merge files
r"^s[a-zA-Z0-9]+\.js$", // Script files
r"^[rpg][a-zA-Z0-9]+\.yaml$", // Rules/Proxies/Groups files
];
patterns.iter().any(|pattern| {
regex::Regex::new(pattern)
.map(|re| re.is_match(filename))
.unwrap_or(false)
})
#[allow(clippy::unwrap_used)]
let re = PROFILE_FILE_RE
.get_or_init(|| Regex::new(r"^(?:[RLmprg][a-zA-Z0-9_-]+\.yaml|s[a-zA-Z0-9_-]+\.js)$").unwrap());
re.is_match(filename)
}
}
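The filename check used to compile up to four regexes on every call; it now compiles a single combined pattern once through a OnceCell and reuses it. A self-contained sketch of that lazy-compile pattern, assuming the once_cell and regex crates already used elsewhere in this diff (the helper name is illustrative):

use once_cell::sync::OnceCell;
use regex::Regex;

static PROFILE_FILE_RE: OnceCell<Regex> = OnceCell::new();

// R/L/m/p/r/g-prefixed .yaml files and s-prefixed .js files are profile artifacts.
fn is_profile_file(filename: &str) -> bool {
    let re = PROFILE_FILE_RE.get_or_init(|| {
        Regex::new(r"^(?:[RLmprg][a-zA-Z0-9_-]+\.yaml|s[a-zA-Z0-9_-]+\.js)$")
            .expect("static pattern is valid")
    });
    re.is_match(filename)
}

fn main() {
    assert!(is_profile_file("R12345678.yaml"));
    assert!(is_profile_file("s12345678.js"));
    assert!(!is_profile_file("notes.txt"));
}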

View File

@@ -2,6 +2,8 @@ use serde_yaml_ng::{Mapping, Value};
use smartstring::alias::String;
use std::collections::{HashMap, HashSet};
use crate::enhance::field::use_keys;
const PATCH_CONFIG_INNER: [&str; 4] = ["allow-lan", "ipv6", "log-level", "unified-delay"];
#[derive(Default, Clone)]
@@ -136,13 +138,3 @@ impl IRuntime {
}
}
}
// TODO: remove once the enhance behavior is fully migrated
#[inline]
fn use_keys<'a>(config: &'a Mapping) -> impl Iterator<Item = String> + 'a {
config.iter().filter_map(|(key, _)| key.as_str()).map(|s: &str| {
let mut s: String = s.into();
s.make_ascii_lowercase();
s
})
}

View File

@@ -155,6 +155,9 @@ pub struct IVerge {
    /// Whether to automatically check the current node's delay
pub enable_auto_delay_detection: Option<bool>,
    /// Interval (in minutes) for automatically checking the current node's delay
pub auto_delay_detection_interval_minutes: Option<u64>,
    /// Whether to use the built-in script support; defaults to true
pub enable_builtin_enhanced: Option<bool>,
@@ -523,6 +526,7 @@ impl IVerge {
patch!(default_latency_test);
patch!(default_latency_timeout);
patch!(enable_auto_delay_detection);
patch!(auto_delay_detection_interval_minutes);
patch!(enable_builtin_enhanced);
patch!(proxy_layout_column);
patch!(test_list);

View File

@@ -1,13 +1,12 @@
use super::CoreManager;
use crate::{
config::{Config, ConfigType},
config::{Config, ConfigType, runtime::IRuntime},
constants::timing,
core::{handle, validate::CoreConfigValidator},
utils::{dirs, help},
};
use anyhow::{Result, anyhow};
use clash_verge_logging::{Type, logging};
use clash_verge_types::runtime::IRuntime;
use smartstring::alias::String;
use std::{collections::HashSet, path::PathBuf, time::Instant};
use tauri_plugin_mihomo::Error as MihomoError;

View File

@@ -25,6 +25,7 @@ pub struct TimerTask {
pub last_run: i64, // Timestamp of last execution
}
// TODO: one Timer for lightweight tasks and one for subscription updates; currently N (number of subscriptions) + 1 timer tasks are created
pub struct Timer {
/// cron manager
pub delay_timer: Arc<RwLock<DelayTimer>>,

View File

@@ -61,14 +61,11 @@ pub fn use_sort(config: Mapping) -> Mapping {
ret
}
pub fn use_keys(config: &Mapping) -> Vec<String> {
config
.iter()
.filter_map(|(key, _)| key.as_str())
.map(|s: &str| {
let mut s: String = s.into();
s.make_ascii_lowercase();
s
})
.collect()
#[inline]
pub fn use_keys<'a>(config: &'a Mapping) -> impl Iterator<Item = String> + 'a {
config.iter().filter_map(|(key, _)| key.as_str()).map(|s: &str| {
let mut s: String = s.into();
s.make_ascii_lowercase();
s
})
}
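use_keys now returns a lazy iterator instead of an allocated Vec<String>, so callers that only stream the keys skip the intermediate collection, while call sites that still need a Vec, like process_global_items shown further below, simply add .collect(). A brief sketch of the signature change on a plain map (types are illustrative, not the serde_yaml_ng Mapping used above):

use std::collections::BTreeMap;

// Lazily yield lowercased keys; no intermediate Vec is allocated.
fn use_keys<'a>(config: &'a BTreeMap<String, i32>) -> impl Iterator<Item = String> + 'a {
    config.keys().map(|k| k.to_ascii_lowercase())
}

fn main() {
    let mut config = BTreeMap::new();
    config.insert("Allow-Lan".to_string(), 1);
    config.insert("IPv6".to_string(), 0);

    // Streaming use: nothing is collected up front.
    for key in use_keys(&config) {
        println!("{key}");
    }

    // Call sites that still need a Vec collect explicitly.
    let keys: Vec<String> = use_keys(&config).collect();
    assert_eq!(keys, vec!["allow-lan".to_string(), "ipv6".to_string()]);
}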

View File

@@ -310,7 +310,7 @@ fn process_global_items(
profile_name: &String,
) -> (Mapping, Vec<String>, HashMap<String, ResultLog>) {
let mut result_map = HashMap::new();
let mut exists_keys = use_keys(&config);
let mut exists_keys = use_keys(&config).collect::<Vec<_>>();
if let ChainType::Merge(merge) = global_merge.data {
exists_keys.extend(use_keys(&merge));

View File

@@ -19,7 +19,6 @@ use crate::{
use anyhow::Result;
use clash_verge_logging::{Type, logging};
use once_cell::sync::OnceCell;
use std::time::Duration;
use tauri::{AppHandle, Manager as _};
#[cfg(target_os = "macos")]
use tauri_plugin_autostart::MacosLauncher;
@@ -61,11 +60,11 @@ mod app_init {
.socket_path(crate::config::IClashTemp::guard_external_controller_ipc())
.pool_config(
tauri_plugin_mihomo::IpcPoolConfigBuilder::new()
.min_connections(1)
.min_connections(3)
.max_connections(32)
.idle_timeout(std::time::Duration::from_secs(60))
.health_check_interval(std::time::Duration::from_secs(60))
.reject_policy(RejectPolicy::Timeout(Duration::from_secs(3)))
.reject_policy(RejectPolicy::Wait)
.build(),
)
.build(),

View File

@@ -1,5 +1,5 @@
{
"version": "2.4.5-rc.2",
"version": "2.4.6",
"$schema": "../node_modules/@tauri-apps/cli/config.schema.json",
"bundle": {
"active": true,

View File

@@ -21,20 +21,14 @@ import {
ListItem,
ListItemText,
} from "@mui/material";
import type { Column } from "@tanstack/react-table";
import { useCallback, useMemo } from "react";
import { useTranslation } from "react-i18next";
interface ColumnOption {
field: string;
label: string;
visible: boolean;
}
interface Props {
open: boolean;
columns: ColumnOption[];
columns: Column<IConnectionsItem, unknown>[];
onClose: () => void;
onToggle: (field: string, visible: boolean) => void;
onOrderChange: (order: string[]) => void;
onReset: () => void;
}
@@ -43,7 +37,6 @@ export const ConnectionColumnManager = ({
open,
columns,
onClose,
onToggle,
onOrderChange,
onReset,
}: Props) => {
@@ -54,9 +47,9 @@ export const ConnectionColumnManager = ({
);
const { t } = useTranslation();
const items = useMemo(() => columns.map((column) => column.field), [columns]);
const items = useMemo(() => columns.map((column) => column.id), [columns]);
const visibleCount = useMemo(
() => columns.filter((column) => column.visible).length,
() => columns.filter((column) => column.getIsVisible()).length,
[columns],
);
@@ -65,7 +58,7 @@ export const ConnectionColumnManager = ({
const { active, over } = event;
if (!over || active.id === over.id) return;
const order = columns.map((column) => column.field);
const order = columns.map((column) => column.id);
const oldIndex = order.indexOf(active.id as string);
const newIndex = order.indexOf(over.id as string);
if (oldIndex === -1 || newIndex === -1) return;
@@ -94,13 +87,16 @@ export const ConnectionColumnManager = ({
>
{columns.map((column) => (
<SortableColumnItem
key={column.field}
key={column.id}
column={column}
onToggle={onToggle}
label={getColumnLabel(column)}
dragHandleLabel={t(
"connections.components.columnManager.dragHandle",
)}
disableToggle={column.visible && visibleCount <= 1}
disableToggle={
!column.getCanHide() ||
(column.getIsVisible() && visibleCount <= 1)
}
/>
))}
</List>
@@ -120,15 +116,15 @@ export const ConnectionColumnManager = ({
};
interface SortableColumnItemProps {
column: ColumnOption;
onToggle: (field: string, visible: boolean) => void;
column: Column<IConnectionsItem, unknown>;
label: string;
dragHandleLabel: string;
disableToggle?: boolean;
}
const SortableColumnItem = ({
column,
onToggle,
label,
dragHandleLabel,
disableToggle = false,
}: SortableColumnItemProps) => {
@@ -139,7 +135,7 @@ const SortableColumnItem = ({
transform,
transition,
isDragging,
} = useSortable({ id: column.field });
} = useSortable({ id: column.id });
const style = useMemo(
() => ({
@@ -167,12 +163,12 @@ const SortableColumnItem = ({
>
<Checkbox
edge="start"
checked={column.visible}
checked={column.getIsVisible()}
disabled={disableToggle}
onChange={(event) => onToggle(column.field, event.target.checked)}
onChange={(event) => column.toggleVisibility(event.target.checked)}
/>
<ListItemText
primary={column.label}
primary={label}
slotProps={{ primary: { variant: "body2" } }}
sx={{ mr: 1 }}
/>
@@ -189,3 +185,11 @@ const SortableColumnItem = ({
</ListItem>
);
};
const getColumnLabel = (column: Column<IConnectionsItem, unknown>) => {
const meta = column.columnDef.meta as { label?: string } | undefined;
if (meta?.label) return meta.label;
const header = column.columnDef.header;
return typeof header === "string" ? header : column.id;
};

View File

@@ -2,6 +2,7 @@ import { ViewColumnRounded } from "@mui/icons-material";
import { Box, IconButton, Tooltip } from "@mui/material";
import {
ColumnDef,
ColumnOrderState,
ColumnSizingState,
flexRender,
getCoreRowModel,
@@ -43,50 +44,57 @@ const reconcileColumnOrder = (
return [...filtered, ...missing];
};
const createConnectionRow = (each: IConnectionsItem) => {
type ColumnField =
| "host"
| "download"
| "upload"
| "dlSpeed"
| "ulSpeed"
| "chains"
| "rule"
| "process"
| "time"
| "source"
| "remoteDestination"
| "type";
const getConnectionCellValue = (field: ColumnField, each: IConnectionsItem) => {
const { metadata, rulePayload } = each;
const chains = [...each.chains].reverse().join(" / ");
const rule = rulePayload ? `${each.rule}(${rulePayload})` : each.rule;
const destination = metadata.destinationIP
? `${metadata.destinationIP}:${metadata.destinationPort}`
: `${metadata.remoteDestination}:${metadata.destinationPort}`;
return {
id: each.id,
host: metadata.host
? `${metadata.host}:${metadata.destinationPort}`
: `${metadata.remoteDestination}:${metadata.destinationPort}`,
download: each.download,
upload: each.upload,
dlSpeed: each.curDownload,
ulSpeed: each.curUpload,
chains,
rule,
process: truncateStr(metadata.process || metadata.processPath),
time: each.start,
source: `${metadata.sourceIP}:${metadata.sourcePort}`,
remoteDestination: destination,
type: `${metadata.type}(${metadata.network})`,
connectionData: each,
};
switch (field) {
case "host":
return metadata.host
? `${metadata.host}:${metadata.destinationPort}`
: `${metadata.remoteDestination}:${metadata.destinationPort}`;
case "download":
return each.download;
case "upload":
return each.upload;
case "dlSpeed":
return each.curDownload;
case "ulSpeed":
return each.curUpload;
case "chains":
return [...each.chains].reverse().join(" / ");
case "rule":
return rulePayload ? `${each.rule}(${rulePayload})` : each.rule;
case "process":
return truncateStr(metadata.process || metadata.processPath);
case "time":
return each.start;
case "source":
return `${metadata.sourceIP}:${metadata.sourcePort}`;
case "remoteDestination":
return metadata.destinationIP
? `${metadata.destinationIP}:${metadata.destinationPort}`
: `${metadata.remoteDestination}:${metadata.destinationPort}`;
case "type":
return `${metadata.type}(${metadata.network})`;
default:
return "";
}
};
type ConnectionRow = ReturnType<typeof createConnectionRow>;
const areRowsEqual = (a: ConnectionRow, b: ConnectionRow) =>
a.host === b.host &&
a.download === b.download &&
a.upload === b.upload &&
a.dlSpeed === b.dlSpeed &&
a.ulSpeed === b.ulSpeed &&
a.chains === b.chains &&
a.rule === b.rule &&
a.process === b.process &&
a.time === b.time &&
a.source === b.source &&
a.remoteDestination === b.remoteDestination &&
a.type === b.type;
interface Props {
connections: IConnectionsItem[];
onShowDetail: (data: IConnectionsItem) => void;
@@ -104,33 +112,30 @@ export const ConnectionTable = (props: Props) => {
onCloseColumnManager,
} = props;
const { t } = useTranslation();
const [columnWidths, setColumnWidths] = useLocalStorage<
Record<string, number>
>(
const [columnWidths, setColumnWidths] = useLocalStorage<ColumnSizingState>(
"connection-table-widths",
// server-side value, this is the default value used by server-side rendering (if any)
// Do not omit (otherwise a Suspense boundary will be triggered)
{},
);
const [columnVisibilityModel, setColumnVisibilityModel] = useLocalStorage<
Partial<Record<string, boolean>>
>(
"connection-table-visibility",
{},
{
serializer: JSON.stringify,
deserializer: (value) => {
try {
const parsed = JSON.parse(value);
if (parsed && typeof parsed === "object") return parsed;
} catch (err) {
console.warn("Failed to parse connection-table-visibility", err);
}
return {};
const [columnVisibilityModel, setColumnVisibilityModel] =
useLocalStorage<VisibilityState>(
"connection-table-visibility",
{},
{
serializer: JSON.stringify,
deserializer: (value) => {
try {
const parsed = JSON.parse(value);
if (parsed && typeof parsed === "object") return parsed;
} catch (err) {
console.warn("Failed to parse connection-table-visibility", err);
}
return {};
},
},
},
);
);
const [columnOrder, setColumnOrder] = useLocalStorage<string[]>(
"connection-table-order",
@@ -149,15 +154,13 @@ export const ConnectionTable = (props: Props) => {
},
);
type ColumnField = Exclude<keyof ConnectionRow, "connectionData">;
interface BaseColumn {
field: ColumnField;
headerName: string;
width?: number;
minWidth?: number;
align?: "left" | "right";
cell?: (row: ConnectionRow) => ReactNode;
cell?: (row: IConnectionsItem) => ReactNode;
}
const baseColumns = useMemo<BaseColumn[]>(() => {
@@ -190,7 +193,7 @@ export const ConnectionTable = (props: Props) => {
width: 76,
minWidth: 60,
align: "right",
cell: (row) => `${parseTraffic(row.dlSpeed).join(" ")}/s`,
cell: (row) => `${parseTraffic(row.curDownload).join(" ")}/s`,
},
{
field: "ulSpeed",
@@ -198,7 +201,7 @@ export const ConnectionTable = (props: Props) => {
width: 76,
minWidth: 60,
align: "right",
cell: (row) => `${parseTraffic(row.ulSpeed).join(" ")}/s`,
cell: (row) => `${parseTraffic(row.curUpload).join(" ")}/s`,
},
{
field: "chains",
@@ -262,177 +265,76 @@ export const ConnectionTable = (props: Props) => {
});
}, [baseColumns, setColumnOrder]);
const columns = useMemo<BaseColumn[]>(() => {
const order = Array.isArray(columnOrder) ? columnOrder : [];
const orderMap = new Map(order.map((field, index) => [field, index]));
return [...baseColumns].sort((a, b) => {
const aIndex = orderMap.has(a.field)
? (orderMap.get(a.field) as number)
: Number.MAX_SAFE_INTEGER;
const bIndex = orderMap.has(b.field)
? (orderMap.get(b.field) as number)
: Number.MAX_SAFE_INTEGER;
if (aIndex === bIndex) {
return order.indexOf(a.field) - order.indexOf(b.field);
}
return aIndex - bIndex;
});
}, [baseColumns, columnOrder]);
const visibleColumnsCount = useMemo(() => {
return columns.reduce((count, column) => {
return (columnVisibilityModel?.[column.field] ?? true) !== false
? count + 1
: count;
}, 0);
}, [columns, columnVisibilityModel]);
const handleToggleColumn = useCallback(
(field: string, visible: boolean) => {
if (!visible && visibleColumnsCount <= 1) {
return;
}
const handleColumnVisibilityChange = useCallback(
(update: Updater<VisibilityState>) => {
setColumnVisibilityModel((prev) => {
const next = { ...(prev ?? {}) };
if (visible) {
delete next[field];
} else {
next[field] = false;
const current = prev ?? {};
const nextState =
typeof update === "function" ? update(current) : update;
const visibleCount = baseColumns.reduce((count, column) => {
const isVisible = (nextState[column.field] ?? true) !== false;
return count + (isVisible ? 1 : 0);
}, 0);
if (visibleCount === 0) {
return current;
}
return next;
const sanitized: VisibilityState = {};
baseColumns.forEach((column) => {
if (nextState[column.field] === false) {
sanitized[column.field] = false;
}
});
return sanitized;
});
},
[setColumnVisibilityModel, visibleColumnsCount],
[baseColumns, setColumnVisibilityModel],
);
const handleManagerOrderChange = useCallback(
(order: string[]) => {
setColumnOrder(() => {
const handleColumnOrderChange = useCallback(
(update: Updater<ColumnOrderState>) => {
setColumnOrder((prev) => {
const current = Array.isArray(prev) ? prev : [];
const nextState =
typeof update === "function" ? update(current) : update;
const baseFields = baseColumns.map((col) => col.field);
return reconcileColumnOrder(order, baseFields);
return reconcileColumnOrder(nextState, baseFields);
});
},
[baseColumns, setColumnOrder],
);
const handleResetColumns = useCallback(() => {
setColumnVisibilityModel({});
setColumnOrder(baseColumns.map((col) => col.field));
}, [baseColumns, setColumnOrder, setColumnVisibilityModel]);
const handleColumnVisibilityChange = useCallback(
(update: Updater<VisibilityState>) => {
setColumnVisibilityModel((prev) => {
const current = prev ?? {};
const baseState: VisibilityState = {};
columns.forEach((column) => {
baseState[column.field] = (current[column.field] ?? true) !== false;
});
const mergedState =
typeof update === "function"
? update(baseState)
: { ...baseState, ...update };
const hiddenFields = columns
.filter((column) => mergedState[column.field] === false)
.map((column) => column.field);
if (columns.length - hiddenFields.length === 0) {
return current;
}
const sanitized: Partial<Record<string, boolean>> = {};
hiddenFields.forEach((field) => {
sanitized[field] = false;
});
return sanitized;
});
},
[columns, setColumnVisibilityModel],
);
const columnVisibilityState = useMemo<VisibilityState>(() => {
const result: VisibilityState = {};
if (!columnVisibilityModel) {
columns.forEach((column) => {
result[column.field] = true;
});
return result;
}
columns.forEach((column) => {
result[column.field] =
(columnVisibilityModel?.[column.field] ?? true) !== false;
});
return result;
}, [columnVisibilityModel, columns]);
const columnOptions = useMemo(() => {
return columns.map((column) => ({
field: column.field,
label: column.headerName ?? column.field,
visible: (columnVisibilityModel?.[column.field] ?? true) !== false,
}));
}, [columns, columnVisibilityModel]);
const prevRowsRef = useRef<Map<string, ConnectionRow>>(new Map());
const connRows = useMemo<ConnectionRow[]>(() => {
const prevMap = prevRowsRef.current;
const nextMap = new Map<string, ConnectionRow>();
const nextRows = connections.map((each) => {
const nextRow = createConnectionRow(each);
const prevRow = prevMap.get(each.id);
if (prevRow && areRowsEqual(prevRow, nextRow)) {
nextMap.set(each.id, prevRow);
return prevRow;
}
nextMap.set(each.id, nextRow);
return nextRow;
});
prevRowsRef.current = nextMap;
return nextRows;
}, [connections]);
const [sorting, setSorting] = useState<SortingState>([]);
const [relativeNow, setRelativeNow] = useState(() => Date.now());
const columnDefs = useMemo<ColumnDef<ConnectionRow>[]>(() => {
return columns.map((column) => {
const baseCell: ColumnDef<ConnectionRow>["cell"] = column.cell
const columnDefs = useMemo<ColumnDef<IConnectionsItem>[]>(() => {
return baseColumns.map((column) => {
const baseCell: ColumnDef<IConnectionsItem>["cell"] = column.cell
? (ctx) => column.cell?.(ctx.row.original)
: (ctx) => ctx.getValue() as ReactNode;
const cell: ColumnDef<ConnectionRow>["cell"] =
const cell: ColumnDef<IConnectionsItem>["cell"] =
column.field === "time"
? (ctx) => dayjs(ctx.row.original.time).from(relativeNow)
? (ctx) => dayjs(ctx.getValue() as string).from(relativeNow)
: baseCell;
return {
id: column.field,
accessorKey: column.field,
accessorFn: (row) => getConnectionCellValue(column.field, row),
header: column.headerName,
size: column.width,
minSize: column.minWidth ?? 80,
enableResizing: true,
minSize: column.minWidth,
meta: {
align: column.align ?? "left",
field: column.field,
label: column.headerName,
},
cell,
} satisfies ColumnDef<ConnectionRow>;
} satisfies ColumnDef<IConnectionsItem>;
});
}, [columns, relativeNow]);
}, [baseColumns, relativeNow]);
useEffect(() => {
if (typeof window === "undefined") return undefined;
@@ -450,7 +352,7 @@ export const ConnectionTable = (props: Props) => {
const prevState = prev ?? {};
const nextState =
typeof updater === "function" ? updater(prevState) : updater;
const sanitized: Record<string, number> = {};
const sanitized: ColumnSizingState = {};
Object.entries(nextState).forEach(([key, size]) => {
if (typeof size === "number" && Number.isFinite(size)) {
sanitized[key] = size;
@@ -463,22 +365,45 @@ export const ConnectionTable = (props: Props) => {
);
const table = useReactTable({
data: connRows,
data: connections,
state: {
columnVisibility: columnVisibilityState,
columnVisibility: columnVisibilityModel ?? {},
columnSizing: columnWidths,
columnOrder,
sorting,
},
initialState: {
columnOrder: baseColumns.map((col) => col.field),
},
defaultColumn: {
minSize: 80,
enableResizing: true,
},
columnResizeMode: "onChange",
enableSortingRemoval: true,
getRowId: (row) => row.id,
getCoreRowModel: getCoreRowModel(),
getSortedRowModel: sorting.length ? getSortedRowModel() : undefined,
onSortingChange: setSorting,
onColumnSizingChange: handleColumnSizingChange,
onColumnVisibilityChange: handleColumnVisibilityChange,
onColumnOrderChange: handleColumnOrderChange,
columns: columnDefs,
});
const handleManagerOrderChange = useCallback(
(order: string[]) => {
const baseFields = baseColumns.map((col) => col.field);
table.setColumnOrder(reconcileColumnOrder(order, baseFields));
},
[baseColumns, table],
);
const handleResetColumns = useCallback(() => {
table.resetColumnVisibility();
table.resetColumnOrder();
}, [table]);
const rows = table.getRowModel().rows;
const tableContainerRef = useRef<HTMLDivElement | null>(null);
const rowVirtualizer = useVirtualizer({
@@ -491,6 +416,7 @@ export const ConnectionTable = (props: Props) => {
const virtualRows = rowVirtualizer.getVirtualItems();
const totalSize = rowVirtualizer.getTotalSize();
const tableWidth = table.getTotalSize();
const managerColumns = table.getAllLeafColumns();
return (
<>
@@ -669,7 +595,7 @@ export const ConnectionTable = (props: Props) => {
return (
<Box
key={row.id}
onClick={() => onShowDetail(row.original.connectionData)}
onClick={() => onShowDetail(row.original)}
sx={{
display: "flex",
position: "absolute",
@@ -726,9 +652,8 @@ export const ConnectionTable = (props: Props) => {
</Box>
<ConnectionColumnManager
open={columnManagerOpen}
columns={columnOptions}
columns={managerColumns}
onClose={onCloseColumnManager}
onToggle={handleToggleColumn}
onOrderChange={handleManagerOrderChange}
onReset={handleResetColumns}
/>


@@ -27,8 +27,13 @@ import {
useTheme,
} from "@mui/material";
import { useLockFn } from "ahooks";
import React from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import React, {
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from "react";
import { useTranslation } from "react-i18next";
import { useNavigate } from "react-router";
import { delayGroup, healthcheckProxyProvider } from "tauri-plugin-mihomo-api";
@@ -46,8 +51,8 @@ const STORAGE_KEY_GROUP = "clash-verge-selected-proxy-group";
const STORAGE_KEY_PROXY = "clash-verge-selected-proxy";
const STORAGE_KEY_SORT_TYPE = "clash-verge-proxy-sort-type";
const AUTO_CHECK_INITIAL_DELAY_MS = 1500;
const AUTO_CHECK_INTERVAL_MS = 5 * 60 * 1000;
const AUTO_CHECK_DEFAULT_INTERVAL_MINUTES = 5;
const AUTO_CHECK_INITIAL_DELAY_MS = 100;
// Proxy node information interface
interface ProxyOption {
@@ -106,6 +111,14 @@ export const CurrentProxyCard = () => {
const { current: currentProfile } = useProfiles();
const autoDelayEnabled = verge?.enable_auto_delay_detection ?? false;
const defaultLatencyTimeout = verge?.default_latency_timeout;
const autoDelayIntervalMs = useMemo(() => {
const rawInterval = verge?.auto_delay_detection_interval_minutes;
const intervalMinutes =
typeof rawInterval === "number" && rawInterval > 0
? rawInterval
: AUTO_CHECK_DEFAULT_INTERVAL_MINUTES;
return Math.max(1, Math.round(intervalMinutes)) * 60 * 1000;
}, [verge?.auto_delay_detection_interval_minutes]);
const currentProfileId = currentProfile?.uid || null;
const getProfileStorageKey = useCallback(
@@ -592,13 +605,13 @@ export const CurrentProxyCard = () => {
if (disposed) return;
await checkCurrentProxyDelay();
if (disposed) return;
intervalTimer = setTimeout(runAndSchedule, AUTO_CHECK_INTERVAL_MS);
intervalTimer = setTimeout(runAndSchedule, autoDelayIntervalMs);
};
initialTimer = setTimeout(async () => {
await checkCurrentProxyDelay();
if (disposed) return;
intervalTimer = setTimeout(runAndSchedule, AUTO_CHECK_INTERVAL_MS);
intervalTimer = setTimeout(runAndSchedule, autoDelayIntervalMs);
}, AUTO_CHECK_INITIAL_DELAY_MS);
return () => {
@@ -608,6 +621,7 @@ export const CurrentProxyCard = () => {
};
}, [
checkCurrentProxyDelay,
autoDelayIntervalMs,
isDirectMode,
state.selection.group,
state.selection.proxy,


@@ -8,6 +8,7 @@ import { Box, Button, IconButton, Skeleton, Typography } from "@mui/material";
import { memo, useCallback, useEffect, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import { useAppData } from "@/providers/app-data-context";
import { getIpInfo } from "@/services/api";
import { EnhancedCard } from "./enhanced-card";
@@ -55,6 +56,7 @@ const getCountryFlag = (countryCode: string) => {
// IP info card component
export const IpInfoCard = () => {
const { t } = useTranslation();
const { clashConfig } = useAppData();
const [ipInfo, setIpInfo] = useState<any>(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState("");
@@ -90,6 +92,20 @@ export const IpInfoCard = () => {
console.warn("Failed to read IP info from sessionStorage:", e);
}
if (typeof navigator !== "undefined" && !navigator.onLine) {
setLoading(false);
lastFetchRef.current = Date.now();
setCountdown(IP_REFRESH_SECONDS);
return;
}
if (!clashConfig) {
setLoading(false);
lastFetchRef.current = Date.now();
setCountdown(IP_REFRESH_SECONDS);
return;
}
try {
setLoading(true);
const data = await getIpInfo();
@@ -113,11 +129,13 @@ export const IpInfoCard = () => {
? err.message
: t("home.components.ipInfo.errors.load"),
);
lastFetchRef.current = Date.now();
setCountdown(IP_REFRESH_SECONDS);
} finally {
setLoading(false);
}
},
[t],
[t, clashConfig],
);
// On mount, fetch the IP info and start a countdown based on the last request time


@@ -16,11 +16,16 @@ import { useTranslation } from "react-i18next";
import { useVerge } from "@/hooks/use-verge";
import { saveWebdavConfig, createWebdavBackup } from "@/services/cmds";
import { showNotice } from "@/services/notice-service";
import {
buildWebdavSignature,
getWebdavStatus,
setWebdavStatus,
} from "@/services/webdav-status";
import { isValidUrl } from "@/utils/helper";
interface BackupConfigViewerProps {
onBackupSuccess: () => Promise<void>;
onSaveSuccess: () => Promise<void>;
onSaveSuccess: (signature?: string) => Promise<void>;
onRefresh: () => Promise<void>;
onInit: () => Promise<void>;
setLoading: (loading: boolean) => void;
@@ -35,7 +40,7 @@ export const BackupConfigViewer = memo(
setLoading,
}: BackupConfigViewerProps) => {
const { t } = useTranslation();
const { verge } = useVerge();
const { verge, mutateVerge } = useVerge();
const { webdav_url, webdav_username, webdav_password } = verge || {};
const [showPassword, setShowPassword] = useState(false);
const usernameRef = useRef<HTMLInputElement>(null);
@@ -58,6 +63,10 @@ export const BackupConfigViewer = memo(
webdav_username !== username ||
webdav_password !== password;
const webdavSignature = buildWebdavSignature(verge);
const webdavStatus = getWebdavStatus(webdavSignature);
const shouldAutoInit = webdavStatus !== "failed";
const handleClickShowPassword = () => {
setShowPassword((prev) => !prev);
};
@@ -66,8 +75,11 @@ export const BackupConfigViewer = memo(
if (!webdav_url || !webdav_username || !webdav_password) {
return;
}
if (!shouldAutoInit) {
return;
}
void onInit();
}, [webdav_url, webdav_username, webdav_password, onInit]);
}, [webdav_url, webdav_username, webdav_password, onInit, shouldAutoInit]);
const checkForm = () => {
const username = usernameRef.current?.value;
@@ -97,18 +109,32 @@ export const BackupConfigViewer = memo(
const save = useLockFn(async (data: IWebDavConfig) => {
checkForm();
const signature = buildWebdavSignature({
webdav_url: data.url,
webdav_username: data.username,
webdav_password: data.password,
});
const trimmedUrl = data.url.trim();
const trimmedUsername = data.username.trim();
try {
setLoading(true);
await saveWebdavConfig(
data.url.trim(),
data.username.trim(),
data.password,
).then(() => {
showNotice.success(
"settings.modals.backup.messages.webdavConfigSaved",
);
onSaveSuccess();
});
await saveWebdavConfig(trimmedUrl, trimmedUsername, data.password);
await mutateVerge(
(current) =>
current
? {
...current,
webdav_url: trimmedUrl,
webdav_username: trimmedUsername,
webdav_password: data.password,
}
: current,
false,
);
setWebdavStatus(signature, "unknown");
showNotice.success("settings.modals.backup.messages.webdavConfigSaved");
await onSaveSuccess(signature);
} catch (error) {
showNotice.error(
"settings.modals.backup.messages.webdavConfigSaveFailed",
@@ -122,16 +148,24 @@ export const BackupConfigViewer = memo(
const handleBackup = useLockFn(async () => {
checkForm();
const signature = buildWebdavSignature({
webdav_url: url,
webdav_username: username,
webdav_password: password,
});
try {
setLoading(true);
await createWebdavBackup().then(async () => {
showNotice.success("settings.modals.backup.messages.backupCreated");
await onBackupSuccess();
});
setWebdavStatus(signature, "ready");
} catch (error) {
showNotice.error("settings.modals.backup.messages.backupFailed", {
error,
});
setWebdavStatus(signature, "failed");
} finally {
setLoading(false);
}


@@ -36,6 +36,11 @@ import {
restoreWebDavBackup,
} from "@/services/cmds";
import { showNotice } from "@/services/notice-service";
import {
buildWebdavSignature,
getWebdavStatus,
setWebdavStatus,
} from "@/services/webdav-status";
dayjs.extend(customParseFormat);
dayjs.extend(relativeTime);
@@ -85,6 +90,8 @@ export const BackupHistoryViewer = ({
const isWebDavConfigured = Boolean(
verge?.webdav_url && verge?.webdav_username && verge?.webdav_password,
);
const webdavSignature = buildWebdavSignature(verge);
const webdavStatus = getWebdavStatus(webdavSignature);
const shouldSkipWebDav = !isLocal && !isWebDavConfigured;
const pageSize = 8;
const isBusy = loading || isRestoring || isRestarting;
@@ -128,33 +135,49 @@ export const BackupHistoryViewer = ({
[t],
);
const fetchRows = useCallback(async () => {
if (!open) return;
if (shouldSkipWebDav) {
setRows([]);
return;
}
setLoading(true);
try {
const list = isLocal ? await listLocalBackup() : await listWebDavBackup();
setRows(
list
.map((item) => buildRow(item))
.filter((item): item is BackupRow => item !== null)
.sort((a, b) =>
a.sort_value === b.sort_value
? b.filename.localeCompare(a.filename)
: b.sort_value - a.sort_value,
),
);
} catch (error) {
console.error(error);
setRows([]);
showNotice.error(error);
} finally {
setLoading(false);
}
}, [buildRow, isLocal, open, shouldSkipWebDav]);
const fetchRows = useCallback(
async (options?: { force?: boolean }) => {
if (!open) return;
if (shouldSkipWebDav) {
setRows([]);
return;
}
if (!isLocal && webdavStatus === "failed" && !options?.force) {
setRows([]);
return;
}
setLoading(true);
try {
const list = isLocal
? await listLocalBackup()
: await listWebDavBackup();
if (!isLocal) {
setWebdavStatus(webdavSignature, "ready");
}
setRows(
list
.map((item) => buildRow(item))
.filter((item): item is BackupRow => item !== null)
.sort((a, b) =>
a.sort_value === b.sort_value
? b.filename.localeCompare(a.filename)
: b.sort_value - a.sort_value,
),
);
} catch (error) {
if (!isLocal) {
setWebdavStatus(webdavSignature, "failed");
}
console.error(error);
setRows([]);
showNotice.error(error);
} finally {
setLoading(false);
}
},
[buildRow, isLocal, open, shouldSkipWebDav, webdavSignature, webdavStatus],
);
useEffect(() => {
void fetchRows();
@@ -169,7 +192,7 @@ export const BackupHistoryViewer = ({
);
const summary = useMemo(() => {
if (shouldSkipWebDav) {
if (shouldSkipWebDav || (!isLocal && webdavStatus === "failed")) {
return t("settings.modals.backup.manual.webdav");
}
if (!total) return t("settings.modals.backup.history.empty");
@@ -179,7 +202,7 @@ export const BackupHistoryViewer = ({
count: total,
recent,
});
}, [rows, shouldSkipWebDav, t, total]);
}, [isLocal, rows, shouldSkipWebDav, t, total, webdavStatus]);
const handleDelete = useLockFn(async (filename: string) => {
if (isRestarting) return;
@@ -241,7 +264,7 @@ export const BackupHistoryViewer = ({
const handleRefresh = () => {
if (isRestarting) return;
void fetchRows();
void fetchRows({ force: true });
};
return (


@@ -14,12 +14,17 @@ import { useCallback, useImperativeHandle, useState } from "react";
import { useTranslation } from "react-i18next";
import { BaseDialog, DialogRef } from "@/components/base";
import { useVerge } from "@/hooks/use-verge";
import {
createLocalBackup,
createWebdavBackup,
importLocalBackup,
} from "@/services/cmds";
import { showNotice } from "@/services/notice-service";
import {
buildWebdavSignature,
setWebdavStatus,
} from "@/services/webdav-status";
import { AutoBackupSettings } from "./auto-backup-settings";
import { BackupHistoryViewer } from "./backup-history-viewer";
@@ -29,6 +34,7 @@ type BackupSource = "local" | "webdav";
export function BackupViewer({ ref }: { ref?: Ref<DialogRef> }) {
const { t } = useTranslation();
const { verge } = useVerge();
const [open, setOpen] = useState(false);
const [busyAction, setBusyAction] = useState<BackupSource | null>(null);
const [localImporting, setLocalImporting] = useState(false);
@@ -36,6 +42,7 @@ export function BackupViewer({ ref }: { ref?: Ref<DialogRef> }) {
const [historySource, setHistorySource] = useState<BackupSource>("local");
const [historyPage, setHistoryPage] = useState(0);
const [webdavDialogOpen, setWebdavDialogOpen] = useState(false);
const webdavSignature = buildWebdavSignature(verge);
useImperativeHandle(ref, () => ({
open: () => setOpen(true),
@@ -59,6 +66,7 @@ export function BackupViewer({ ref }: { ref?: Ref<DialogRef> }) {
} else {
await createWebdavBackup();
showNotice.success("settings.modals.backup.messages.backupCreated");
setWebdavStatus(webdavSignature, "ready");
}
} catch (error) {
console.error(error);
@@ -68,6 +76,9 @@ export function BackupViewer({ ref }: { ref?: Ref<DialogRef> }) {
: "settings.modals.backup.messages.backupFailed",
target === "local" ? undefined : { error },
);
if (target === "webdav") {
setWebdavStatus(webdavSignature, "failed");
}
} finally {
setBusyAction(null);
}


@@ -3,8 +3,13 @@ import { useCallback, useState } from "react";
import { useTranslation } from "react-i18next";
import { BaseDialog, BaseLoadingOverlay } from "@/components/base";
import { useVerge } from "@/hooks/use-verge";
import { listWebDavBackup } from "@/services/cmds";
import { showNotice } from "@/services/notice-service";
import {
buildWebdavSignature,
setWebdavStatus,
} from "@/services/webdav-status";
import { BackupConfigViewer } from "./backup-config-viewer";
@@ -22,7 +27,9 @@ export const BackupWebdavDialog = ({
setBusy,
}: BackupWebdavDialogProps) => {
const { t } = useTranslation();
const { verge } = useVerge();
const [loading, setLoading] = useState(false);
const webdavSignature = buildWebdavSignature(verge);
const handleLoading = useCallback(
(value: boolean) => {
@@ -33,16 +40,19 @@ export const BackupWebdavDialog = ({
);
const refreshWebdav = useCallback(
async (options?: { silent?: boolean }) => {
async (options?: { silent?: boolean; signature?: string }) => {
const signature = options?.signature ?? webdavSignature;
handleLoading(true);
try {
await listWebDavBackup();
setWebdavStatus(signature, "ready");
if (!options?.silent) {
showNotice.success(
"settings.modals.backup.messages.webdavRefreshSuccess",
);
}
} catch (error) {
setWebdavStatus(signature, "failed");
showNotice.error(
"settings.modals.backup.messages.webdavRefreshFailed",
{ error },
@@ -51,11 +61,11 @@ export const BackupWebdavDialog = ({
handleLoading(false);
}
},
[handleLoading],
[handleLoading, webdavSignature],
);
const refreshSilently = useCallback(
() => refreshWebdav({ silent: true }),
(signature?: string) => refreshWebdav({ silent: true, signature }),
[refreshWebdav],
);


@@ -29,6 +29,7 @@ export const MiscViewer = forwardRef<DialogRef>((props, ref) => {
enableBuiltinEnhanced: true,
proxyLayoutColumn: 6,
enableAutoDelayDetection: false,
autoDelayDetectionIntervalMinutes: 5,
defaultLatencyTest: "",
autoLogClean: 2,
defaultLatencyTimeout: 10000,
@@ -46,6 +47,8 @@ export const MiscViewer = forwardRef<DialogRef>((props, ref) => {
enableBuiltinEnhanced: verge?.enable_builtin_enhanced ?? true,
proxyLayoutColumn: verge?.proxy_layout_column || 6,
enableAutoDelayDetection: verge?.enable_auto_delay_detection ?? false,
autoDelayDetectionIntervalMinutes:
verge?.auto_delay_detection_interval_minutes ?? 5,
defaultLatencyTest: verge?.default_latency_test || "",
autoLogClean: verge?.auto_log_clean || 0,
defaultLatencyTimeout: verge?.default_latency_timeout || 10000,
@@ -65,6 +68,8 @@ export const MiscViewer = forwardRef<DialogRef>((props, ref) => {
enable_builtin_enhanced: values.enableBuiltinEnhanced,
proxy_layout_column: values.proxyLayoutColumn,
enable_auto_delay_detection: values.enableAutoDelayDetection,
auto_delay_detection_interval_minutes:
values.autoDelayDetectionIntervalMinutes,
default_latency_test: values.defaultLatencyTest,
default_latency_timeout: values.defaultLatencyTimeout,
auto_log_clean: values.autoLogClean as any,
@@ -323,6 +328,44 @@ export const MiscViewer = forwardRef<DialogRef>((props, ref) => {
/>
</ListItem>
<ListItem sx={{ padding: "5px 2px" }}>
<ListItemText
primary={t(
"settings.modals.misc.fields.autoDelayDetectionInterval",
)}
sx={{ maxWidth: "fit-content" }}
/>
<TextField
autoComplete="new-password"
size="small"
type="number"
autoCorrect="off"
autoCapitalize="off"
spellCheck="false"
sx={{ width: 160, marginLeft: "auto" }}
value={values.autoDelayDetectionIntervalMinutes}
disabled={!values.enableAutoDelayDetection}
onChange={(e) => {
const parsed = parseInt(e.target.value, 10);
const intervalMinutes =
Number.isFinite(parsed) && parsed > 0 ? parsed : 1;
setValues((v) => ({
...v,
autoDelayDetectionIntervalMinutes: intervalMinutes,
}));
}}
slotProps={{
input: {
endAdornment: (
<InputAdornment position="end">
{t("shared.units.minutes")}
</InputAdornment>
),
},
}}
/>
</ListItem>
<ListItem sx={{ padding: "5px 2px" }}>
<ListItemText
primary={t("settings.modals.misc.fields.defaultLatencyTest")}

View File

@@ -22,8 +22,7 @@ import {
useState,
} from "react";
import { useTranslation } from "react-i18next";
import useSWR, { mutate } from "swr";
import { getBaseConfig } from "tauri-plugin-mihomo-api";
import { mutate } from "swr";
import {
BaseDialog,
@@ -113,6 +112,8 @@ export const SysproxyViewer = forwardRef<DialogRef>((props, ref) => {
type AutoProxy = Awaited<ReturnType<typeof getAutotemProxy>>;
const [autoproxy, setAutoproxy] = useState<AutoProxy>();
const { clashConfig } = useAppData();
const {
enable_system_proxy: enabled,
proxy_auto_config,
@@ -148,13 +149,6 @@ export const SysproxyViewer = forwardRef<DialogRef>((props, ref) => {
return "127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,localhost,*.local,*.crashlytics.com,<local>";
};
const { data: clashConfig } = useSWR("getClashConfig", getBaseConfig, {
revalidateOnFocus: false,
revalidateIfStale: true,
dedupingInterval: 1000,
errorRetryInterval: 5000,
});
const prevMixedPortRef = useRef(clashConfig?.mixedPort);
useEffect(() => {

View File

@@ -4,7 +4,7 @@ import { mutate, type MutatorCallback } from "swr";
import useSWRSubscription from "swr/subscription";
import { type Message, type MihomoWebSocket } from "tauri-plugin-mihomo-api";
export const RECONNECT_DELAY_MS = 500;
export const RECONNECT_DELAY_MS = 100;
type NextFn<T> = (error?: any, data?: T | MutatorCallback<T>) => void;


@@ -48,7 +48,7 @@ const WORKER_CONFIG = {
rawDataMinutes: 10,
compressedDataMinutes: 60,
compressionRatio: 5,
snapshotIntervalMs: 250,
snapshotIntervalMs: 100,
defaultRangeMinutes: 10,
};


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "أعمدة عرض الوكيل",
"autoLogClean": "تنظيف السجلات تلقائيًا",
"autoDelayDetection": "اكتشاف التأخير التلقائي",
"autoDelayDetectionInterval": "الفاصل الزمني لاكتشاف التأخير التلقائي",
"defaultLatencyTest": "اختبار التأخير الافتراضي",
"defaultLatencyTimeout": "مهلة التأخير الافتراضية"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Anzahl der Spalten im Proxy-Layout",
"autoLogClean": "Protokolle automatisch bereinigen",
"autoDelayDetection": "Automatische Latenzprüfung",
"autoDelayDetectionInterval": "Intervall für automatische Latenzprüfung",
"defaultLatencyTest": "Standard-Testlink",
"defaultLatencyTimeout": "Test-Timeout"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Proxy Layout Columns",
"autoLogClean": "Auto Log Clean",
"autoDelayDetection": "Auto Delay Detection",
"autoDelayDetectionInterval": "Auto Delay Detection Interval",
"defaultLatencyTest": "Default Latency Test",
"defaultLatencyTimeout": "Default Latency Timeout"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Número de columnas en la disposición de la página de proxy",
"autoLogClean": "Limpiar registros automáticamente",
"autoDelayDetection": "Detección automática de latencia",
"autoDelayDetectionInterval": "Intervalo de detección automática de latencia",
"defaultLatencyTest": "Enlace de prueba de latencia predeterminado",
"defaultLatencyTimeout": "Tiempo de espera de la prueba de latencia"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "ستون چیدمان پراکسی",
"autoLogClean": "پاکسازی خودکار لاگ",
"autoDelayDetection": "تشخیص تأخیر خودکار",
"autoDelayDetectionInterval": "فاصله تشخیص تأخیر خودکار",
"defaultLatencyTest": "آزمون تأخیر پیش‌فرض",
"defaultLatencyTimeout": "زمان انتظار تأخیر پیش‌فرض"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Kolom Tata Letak Proksi",
"autoLogClean": "Pembersihan Log Otomatis",
"autoDelayDetection": "Deteksi Latensi Otomatis",
"autoDelayDetectionInterval": "Interval Deteksi Latensi Otomatis",
"defaultLatencyTest": "Tes Latensi Default",
"defaultLatencyTimeout": "Waktu Habis Latensi Default"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "プロキシページのレイアウト列数",
"autoLogClean": "ログを自動的にクリーンアップ",
"autoDelayDetection": "自動遅延検出",
"autoDelayDetectionInterval": "自動遅延検出間隔",
"defaultLatencyTest": "デフォルトの遅延テストURL",
"defaultLatencyTimeout": "テストタイムアウト時間"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "프록시 레이아웃 열 수",
"autoLogClean": "로그 자동 정리",
"autoDelayDetection": "자동 지연 감지",
"autoDelayDetectionInterval": "자동 지연 감지 간격",
"defaultLatencyTest": "기본 지연 테스트",
"defaultLatencyTimeout": "기본 지연 제한시간"
},


@@ -121,14 +121,14 @@
"communication": "Core communication error"
},
"labels": {
"rule": "Режим правил",
"global": "Глобальный режим",
"direct": "Прямой режим"
"rule": "Rule",
"global": "Global",
"direct": "Direct"
},
"descriptions": {
"rule": "Automatically choose proxies according to the rule set.",
"global": "Forward all network requests through the selected proxy.",
"direct": "Bypass the proxy and connect to the internet directly."
"rule": "Автоматически выбирать прокси в зависимости от правил",
"global": "Перенаправлять все сетевые запросы через выбранный прокси",
"direct": "Подключение к интернету без прокси"
}
}
}


@@ -73,17 +73,17 @@
"components": {
"enums": {
"strategies": {
"select": "Выбор прокси вручную",
"url-test": "Выбор прокси на основе задержки теста URL",
"fallback": "Переключение на другой прокси при ошибке",
"load-balance": "Распределение прокси на основе балансировки нагрузки",
"relay": "Передача через определенную цепочку прокси"
"select": "select",
"url-test": "url-test",
"fallback": "fallback",
"load-balance": "load-balance",
"relay": "relay"
},
"policies": {
"DIRECT": "Данные направляются напрямую наружу",
"REJECT": "Перехватывает запросы",
"REJECT-DROP": "Отклоняет запросы",
"PASS": "Пропускает это правило при совпадении"
"DIRECT": "DIRECT",
"REJECT": "REJECT",
"REJECT-DROP": "REJECT-DROP",
"PASS": "PASS"
}
}
}


@@ -42,39 +42,39 @@
}
},
"ruleTypes": {
"DOMAIN": "Соответствует полному доменному имени",
"DOMAIN-SUFFIX": "Соответствует суффиксу домена",
"DOMAIN-KEYWORD": "Соответствует ключевому слову домена",
"DOMAIN-REGEX": "Соответствует домену с использованием регулярных выражений",
"GEOSITE": "Соответствует доменам в Geosite",
"GEOIP": "Соответствует коду страны IP-адреса",
"SRC-GEOIP": "Соответствует коду страны исходного IP-адреса",
"IP-ASN": "Соответствует ASN IP-адреса",
"SRC-IP-ASN": "Соответствует ASN исходного IP-адреса",
"IP-CIDR": "Соответствует диапазону IP-адресов",
"IP-CIDR6": "Соответствует диапазону IPv6-адресов",
"SRC-IP-CIDR": "Соответствует диапазону исходных IP-адресов",
"IP-SUFFIX": "Соответствует диапазону суффиксов IP-адресов",
"SRC-IP-SUFFIX": "Соответствует диапазону суффиксов исходных IP-адресов",
"SRC-PORT": "Соответствует диапазону исходных портов",
"DST-PORT": "Соответствует диапазону целевых портов",
"IN-PORT": "Соответствует входящему порту",
"DSCP": "Маркировка DSCP (только для tproxy UDP входящего)",
"PROCESS-NAME": "Соответствует имени процесса (имя пакета Android)",
"PROCESS-PATH": "Соответствует полному пути процесса",
"PROCESS-NAME-REGEX": "Соответствует имени процесса с использованием регулярных выражений (имя пакета Android)",
"PROCESS-PATH-REGEX": "Соответствует полному пути процесса с использованием регулярных выражений",
"NETWORK": "Соответствует транспортному протоколу (tcp/udp)",
"UID": "Соответствует USER ID в Linux",
"IN-TYPE": "Соответствует типу входящего соединения",
"IN-USER": "Соответствует имени пользователя входящего соединения",
"IN-NAME": "Соответствует имени входящего соединения",
"SUB-RULE": "Подправило",
"RULE-SET": "Соответствует набору правил",
"AND": "Логическое И",
"OR": "Логическое ИЛИ",
"NOT": "Логическое НЕ",
"MATCH": "Соответствует всем запросам"
"DOMAIN": "DOMAIN",
"DOMAIN-SUFFIX": "DOMAIN-SUFFIX",
"DOMAIN-KEYWORD": "DOMAIN-KEYWORD",
"DOMAIN-REGEX": "DOMAIN-REGEX",
"GEOSITE": "GEOSITE",
"GEOIP": "GEOIP",
"SRC-GEOIP": "SRC-GEOIP",
"IP-ASN": "IP-ASN",
"SRC-IP-ASN": "SRC-IP-ASN",
"IP-CIDR": "IP-CIDR",
"IP-CIDR6": "IP-CIDR6",
"SRC-IP-CIDR": "SRC-IP-CIDR",
"IP-SUFFIX": "IP-SUFFIX",
"SRC-IP-SUFFIX": "SRC-IP-SUFFIX",
"SRC-PORT": "SRC-PORT",
"DST-PORT": "DST-PORT",
"IN-PORT": "IN-PORT",
"DSCP": "DSCP",
"PROCESS-NAME": "PROCESS-NAME",
"PROCESS-PATH": "PROCESS-PATH",
"PROCESS-NAME-REGEX": "PROCESS-NAME-REGEX",
"PROCESS-PATH-REGEX": "PROCESS-PATH-REGEX",
"NETWORK": "NETWORK",
"UID": "UID",
"IN-TYPE": "IN-TYPE",
"IN-USER": "IN-USER",
"IN-NAME": "IN-NAME",
"SUB-RULE": "SUB-RULE",
"RULE-SET": "RULE-SET",
"AND": "AND",
"OR": "OR",
"NOT": "NOT",
"MATCH": "MATCH"
},
"title": "Редактировать правила"
}


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Количество столбцов в макете прокси",
"autoLogClean": "Автоматическая очистка логов",
"autoDelayDetection": "Автоматическое измерение задержки",
"autoDelayDetectionInterval": "Интервал автоматического измерения задержки",
"defaultLatencyTest": "Ссылка на тест задержки",
"defaultLatencyTimeout": "Таймаут задержки по умолчанию"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Vekil Düzeni Sütunları",
"autoLogClean": "Otomatik Günlük Temizleme",
"autoDelayDetection": "Otomatik Gecikme Tespiti",
"autoDelayDetectionInterval": "Otomatik Gecikme Tespiti Aralığı",
"defaultLatencyTest": "Varsayılan Gecikme Testi",
"defaultLatencyTimeout": "Varsayılan Gecikme Zaman Aşımı"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "Прокси күрсәтү баганалары саны",
"autoLogClean": "Логларны автоматик чистарту",
"autoDelayDetection": "Автоматик тоткарлык ачыклау",
"autoDelayDetectionInterval": "Автоматик тоткарлык ачыклау интервалы",
"defaultLatencyTest": "Тоткарлануны тикшерү сылтамасы (defaults)",
"defaultLatencyTimeout": "Тоткарлануның стандарт таймауты"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "代理页布局列数",
"autoLogClean": "自动清理日志",
"autoDelayDetection": "自动延迟检测",
"autoDelayDetectionInterval": "自动延迟检测间隔",
"defaultLatencyTest": "默认测试链接",
"defaultLatencyTimeout": "测试超时时间"
},


@@ -377,6 +377,7 @@
"proxyLayoutColumns": "代理頁面欄數",
"autoLogClean": "自動清理日誌",
"autoDelayDetection": "自動延遲偵測",
"autoDelayDetectionInterval": "自動延遲偵測間隔",
"defaultLatencyTest": "預設測試網址",
"defaultLatencyTimeout": "測試逾時"
},


@@ -23,8 +23,8 @@ import {
} from "@mui/material";
import dayjs from "dayjs";
import relativeTime from "dayjs/plugin/relativeTime";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import type { CSSProperties } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import { Outlet, useNavigate } from "react-router";
import { SWRConfig } from "swr";
@@ -257,12 +257,24 @@ const Layout = () => {
<SWRConfig
value={{
errorRetryCount: 3,
// TODO remove the 5000ms
errorRetryInterval: 5000,
onError: (error, key) => {
console.error(`[SWR Error] Key: ${key}, Error:`, error);
// FIXME the condition should not be handled globally
if (key !== "getAutotemProxy") {
console.error(`SWR Error for ${key}:`, error);
return;
}
// FIXME we need a better way to handle the retry when first booting the app
const silentKeys = [
"getVersion",
"getClashConfig",
"getAutotemProxy",
];
if (silentKeys.includes(key)) return;
console.error(`[SWR Error] Key: ${key}, Error:`, error);
},
dedupingInterval: 2000,
}}


@@ -3,13 +3,13 @@ import { Box, Button, ButtonGroup } from "@mui/material";
import { useLockFn } from "ahooks";
import { useCallback, useEffect, useMemo, useReducer, useState } from "react";
import { useTranslation } from "react-i18next";
import useSWR from "swr";
import { closeAllConnections, getBaseConfig } from "tauri-plugin-mihomo-api";
import { closeAllConnections } from "tauri-plugin-mihomo-api";
import { BasePage } from "@/components/base";
import { ProviderButton } from "@/components/proxy/provider-button";
import { ProxyGroups } from "@/components/proxy/proxy-groups";
import { useVerge } from "@/hooks/use-verge";
import { useAppData } from "@/providers/app-data-context";
import {
getRuntimeProxyChainConfig,
patchClashMode,
@@ -41,21 +41,11 @@ const ProxyPage = () => {
null as string | null,
);
const { clashConfig, refreshClashConfig } = useAppData();
const updateChainConfigData = useCallback((value: string | null) => {
dispatchChainConfigData(value);
}, []);
const { data: clashConfig, mutate: mutateClash } = useSWR(
"getClashConfig",
getBaseConfig,
{
revalidateOnFocus: false,
revalidateIfStale: true,
dedupingInterval: 1000,
errorRetryInterval: 5000,
},
);
const { verge } = useVerge();
const modeList = useMemo(() => MODES, []);
@@ -69,7 +59,7 @@ const ProxyPage = () => {
closeAllConnections();
}
await patchClashMode(mode);
mutateClash();
refreshClashConfig();
});
const onToggleChainMode = useLockFn(async () => {


@@ -44,7 +44,7 @@ export const useAppData = () => {
const context = use(AppDataContext);
if (!context) {
throw new Error("useAppData必须在AppDataProvider内使用");
throw new Error("useAppData must be used within AppDataProvider");
}
return context;


@@ -15,7 +15,7 @@ import {
getRunningMode,
getSystemProxy,
} from "@/services/cmds";
import { SWR_DEFAULTS, SWR_REALTIME, SWR_SLOW_POLL } from "@/services/config";
import { SWR_DEFAULTS, SWR_MIHOMO, SWR_REALTIME } from "@/services/config";
import { AppDataContext, AppDataContextType } from "./app-data-context";
@@ -32,32 +32,36 @@ export const AppDataProvider = ({
calcuProxies,
{
...SWR_REALTIME,
onError: (err) => console.warn("[DataProvider] Proxy fetch failed:", err),
onError: (_) => {
// FIXME when we initially start the app and the core is still starting,
// there will be errors thrown by the getProxies API.
// We should handle this case properly later.
},
},
);
const { data: clashConfig, mutate: refreshClashConfig } = useSWR(
"getClashConfig",
getBaseConfig,
SWR_SLOW_POLL,
SWR_MIHOMO,
);
const { data: proxyProviders, mutate: refreshProxyProviders } = useSWR(
"getProxyProviders",
calcuProxyProviders,
SWR_DEFAULTS,
SWR_MIHOMO,
);
const { data: ruleProviders, mutate: refreshRuleProviders } = useSWR(
"getRuleProviders",
getRuleProviders,
SWR_DEFAULTS,
SWR_MIHOMO,
);
const { data: rulesData, mutate: refreshRules } = useSWR(
"getRules",
getRules,
SWR_DEFAULTS,
SWR_MIHOMO,
);
useEffect(() => {


@@ -1,4 +1,12 @@
import { useSWRConfig } from "swr";
const SWR_NOT_SMART = {
revalidateOnFocus: false,
revalidateOnReconnect: false,
revalidateIfStale: false,
suspense: false,
errorRetryCount: 2,
dedupingInterval: 1500,
errorRetryInterval: 3000,
} as const;
export const SWR_DEFAULTS = {
revalidateOnFocus: false,
@@ -19,7 +27,6 @@ export const SWR_SLOW_POLL = {
refreshInterval: 60000,
} as const;
export const useSWRMutate = () => {
const { mutate } = useSWRConfig();
return mutate;
export const SWR_MIHOMO = {
...SWR_NOT_SMART,
};


@@ -0,0 +1,55 @@
export type WebdavStatus = "unknown" | "ready" | "failed";
interface WebdavStatusCache {
signature: string;
status: WebdavStatus;
updatedAt: number;
}
const WEBDAV_STATUS_KEY = "webdav_status_cache";
export const buildWebdavSignature = (
verge?: Pick<
IVergeConfig,
"webdav_url" | "webdav_username" | "webdav_password"
> | null,
) => {
const url = verge?.webdav_url?.trim() ?? "";
const username = verge?.webdav_username?.trim() ?? "";
const password = verge?.webdav_password ?? "";
if (!url && !username && !password) return "";
return JSON.stringify([url, username, password]);
};
const canUseStorage = () => typeof localStorage !== "undefined";
export const getWebdavStatus = (signature: string): WebdavStatus => {
if (!signature || !canUseStorage()) return "unknown";
const raw = localStorage.getItem(WEBDAV_STATUS_KEY);
if (!raw) return "unknown";
try {
const data = JSON.parse(raw) as Partial<WebdavStatusCache>;
if (!data || data.signature !== signature) return "unknown";
return data.status === "ready" || data.status === "failed"
? data.status
: "unknown";
} catch {
return "unknown";
}
};
export const setWebdavStatus = (signature: string, status: WebdavStatus) => {
if (!signature || !canUseStorage()) return;
const payload: WebdavStatusCache = {
signature,
status,
updatedAt: Date.now(),
};
localStorage.setItem(WEBDAV_STATUS_KEY, JSON.stringify(payload));
};
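
The helpers above cache the last known WebDAV connection status per credential signature in localStorage, so the backup dialogs can skip auto-refreshing an endpoint that failed on its previous attempt. A minimal consumer sketch, not part of this change (the two helper functions below are illustrative names, assuming the ambient IVergeConfig type used elsewhere in the codebase):

import {
  buildWebdavSignature,
  getWebdavStatus,
  setWebdavStatus,
} from "@/services/webdav-status";

// Hypothetical helper: decide whether an automatic WebDAV refresh should run on mount.
export const shouldAutoRefreshWebdav = (verge?: IVergeConfig | null): boolean => {
  const signature = buildWebdavSignature(verge);
  if (!signature) return false; // no credentials configured yet
  return getWebdavStatus(signature) !== "failed"; // skip endpoints that failed last time
};

// Hypothetical helper: record the outcome of a manual refresh for the next mount.
export const recordWebdavOutcome = (verge: IVergeConfig, ok: boolean): void => {
  const signature = buildWebdavSignature(verge);
  if (signature) setWebdavStatus(signature, ok ? "ready" : "failed");
};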


@@ -535,6 +535,7 @@ export const translationKeys = [
"settings.modals.misc.fields.proxyLayoutColumns",
"settings.modals.misc.fields.autoLogClean",
"settings.modals.misc.fields.autoDelayDetection",
"settings.modals.misc.fields.autoDelayDetectionInterval",
"settings.modals.misc.fields.defaultLatencyTest",
"settings.modals.misc.fields.defaultLatencyTimeout",
"settings.modals.misc.tooltips.autoCloseConnections",


@@ -941,6 +941,7 @@ export interface TranslationResources {
autoCheckUpdate: string;
autoCloseConnections: string;
autoDelayDetection: string;
autoDelayDetectionInterval: string;
autoLogClean: string;
defaultLatencyTest: string;
defaultLatencyTimeout: string;


@@ -946,6 +946,7 @@ interface IVergeConfig {
default_latency_test?: string;
default_latency_timeout?: number;
enable_auto_delay_detection?: boolean;
auto_delay_detection_interval_minutes?: number;
enable_builtin_enhanced?: boolean;
auto_log_clean?: 0 | 1 | 2 | 3 | 4;
enable_auto_backup_schedule?: boolean;