mirror of https://github.com/clash-verge-rev/clash-verge-rev.git (synced 2026-01-28 07:14:40 +08:00)
chore: i18n (#5276)
* chore: notice i18n
* feat: add script to clean up unused i18n keys
* chore: cleanup i18n keys
* refactor(i18n/proxies): migrate proxies UI to structured locale keys
* chore: i18n for rule module
* chore: i18n for profile module
* chore: i18n for connections module
* chore: i18n for settings module
* chore: i18n for verge settings
* chore: i18n for theme settings
* chore: i18n for theme
* chore(i18n): components.home.*
* chore(i18n): remove unused i18n keys
* chore(i18n): components.profile.*
* chore(i18n): components.connection
* chore(i18n): pages.logs.*
* chore(i18n): pages.*.provider
* chore(i18n): components.settings.externalCors.*
* chore(i18n): components.settings.clash.*
* chore(i18n): components.settings.liteMode.*
* chore(i18n): components.settings.backup.*
* chore(i18n): components.settings.clash.port.*
* chore(i18n): components.settings.misc.*
* chore(i18n): components.settings.update.*
* chore(i18n): components.settings.sysproxy.*
* chore(i18n): components.settings.sysproxy.*
* chore(i18n): pages.profiles.notices/components.providers.notices
* refactor(notice): unify showNotice usage
* refactor(notice): add typed showNotice shortcuts, centralize defaults, and simplify subscriptions
* refactor: unify showNotice usage
* refactor(notice): unify showNotice API
* refactor(notice): unify showNotice usage
* chore(i18n): components.test.*
* chore(i18n): components.settings.dns.*
* chore(i18n): components.home.clashInfo.*
* chore(i18n): components.home.systemInfo.*
* chore(i18n): components.home.ipInfo/traffic.*
* chore(i18n): navigation.*
* refactor(i18n): remove pages.* namespace and migrate route texts under module-level page keys
* chore(i18n): common.*
* chore(i18n): common.*
* fix: change error handling in patch_profiles_config to return false when a switch is in progress
* fix: improve error handling in patch_profiles_config to prevent requests during profile switching
* fix: change error handling in patch_profiles_config to return false when a switch is in progress
  fix: ensure CURRENT_SWITCHING_PROFILE is reset after config updates in perform_config_update and patch_profiles_config
* chore(i18n): restructure root-level locale keys into namespaces
* chore(i18n): add missing i18n keys
* docs: i18n guide
* chore: adjust i18n
* refactor(i18n): align UI actions and status labels with common keys
* refactor(i18n): unify two-name locale namespaces
* refactor(i18n/components): unify locale keys and update component references
* chore(i18n): add shared and entities namespaces to all locale files
* refactor(i18n): consolidate shared and entity namespaces across features
* chore(deps): update npm dependencies to ^7.3.5 (#5310)
  Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
* refactor(i18n): migrate shared editor modes and consolidate entities namespaces
* tmp
* refactor(i18n): flatten locales and move theme/validation strings
* docs: CONTRIBUTING_i18n.md
* refactor(i18n): restructure feedback and profile namespaces for better organization
* refactor(i18n): unify settings locale structure and update references
* refactor(i18n): reorganize locale keys for home, proxies, rules, connections, logs, unlock, and tests
* refactor(i18n/feedback/layout): unify shared toasts & normalize layout namespace
* refactor(i18n): centralize common UI strings in shared
* refactor(i18n): flatten headers and unify locale schema
* refactor(i18n): consolidate duplicate per-feature translations into shared namespace
* refactor(i18n): split locales into per-namespace files
* style: lint
* refactor(i18n): unify unlock UI translations under tests namespace
* feat(i18n): add type-checked translation keys
* style: eslint import order
* feat(i18n): replace ad-hoc loader with rust-i18n backend bundles
* chore(prebuild): remove locale-copy step
* fix(i18n, notice): propagate runtime params and update cleanup script path
* fix(i18n,notice): make locale formatting idempotent and guard early notice translations
* fix(i18n): resolve locale aliases and match OS codes correctly
* fix(unlock): use i18next-compatible double-brace interpolation in failure notice
* fix(i18n): route unlock error notices through translation keys
* fix(i18n): i18n types
* feat(i18n): localize upgrade notice for Clash core viewer
* fix(notice): ensure runtime overrides apply to prefix translations
* chore(i18n): replace literal notices with translation keys
* chore(i18n): types
* chore(i18n): regen typings before formatting to keep keys in sync
* chore(i18n): simply labels
* chore(i18n): adjust translation
* chore: remove eslint-plugin-i18next
* chore(i18n): add/refine Korean translations across frontend scopes and Rust backend (#5341)
* chore(i18n): translate settings.json (missed in previous pass) (#5343)
* chore(i18n): add/refine Korean translations across frontend scopes and Rust backend
* chore(i18n): add/refine Korean translations across frontend scopes and Rust backend
* fix(i18n-tauri): quote placeholder-leading value in ko.yml to prevent rust_i18n parse panic
* chore(i18n): translate settings.json (forgot to include previously)

---------

Co-authored-by: rozan <34974262+thelojan@users.noreply.github.com>
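Several of the notice-related items above converge on one idea: a single typed showNotice helper driven by the generated translation keys. The sketch below is purely illustrative; the function names, parameters, and import path are assumptions, not the project's actual API:

// Hypothetical sketch only: the project's real showNotice API may differ.
import i18next from "i18next";

// Generated by scripts/generate-i18n-keys.mjs; the "@/" alias path is an assumption.
import type { TranslationKey } from "@/types/generated/i18n-keys";

type NoticeType = "success" | "info" | "error";

// Only keys present in the generated union are accepted, so typos fail to compile.
export function showNotice(
  type: NoticeType,
  key: TranslationKey,
  params?: Record<string, unknown>,
): void {
  const message = i18next.t(key, params);
  // Delivery (toast, event bus, etc.) is out of scope here; log as a stand-in.
  console.log(`[${type}] ${message}`);
}

// Typed shortcuts, mirroring the "typed showNotice shortcuts" item above.
export const showSuccessNotice = (key: TranslationKey, params?: Record<string, unknown>) =>
  showNotice("success", key, params);
export const showErrorNotice = (key: TranslationKey, params?: Record<string, unknown>) =>
  showNotice("error", key, params);

The diff below begins with the previous key-cleanup script, which this commit removes in favour of the new scripts/cleanup-unused-i18n.mjs.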
@@ -1,102 +0,0 @@
import fs from "fs";
import path from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const LOCALES_DIR = path.resolve(__dirname, "../src/locales");
const SRC_DIRS = [
  path.resolve(__dirname, "../src"),
  path.resolve(__dirname, "../src-tauri"),
];
const exts = [".js", ".ts", ".tsx", ".jsx", ".vue", ".rs"];

// Recursively collect all files
function getAllFiles(dir, exts) {
  let files = [];
  fs.readdirSync(dir).forEach((file) => {
    const full = path.join(dir, file);
    if (fs.statSync(full).isDirectory()) {
      files = files.concat(getAllFiles(full, exts));
    } else if (exts.includes(path.extname(full))) {
      files.push(full);
    }
  });
  return files;
}

// Read all source files into one big string
function getAllSourceContent() {
  const files = SRC_DIRS.flatMap((dir) => getAllFiles(dir, exts));
  return files.map((f) => fs.readFileSync(f, "utf8")).join("\n");
}

// Whitelisted keys that are never checked for usage
const WHITELIST_KEYS = [
  "theme.light",
  "theme.dark",
  "theme.system",
  "Already Using Latest Core Version",
];

// Main workflow
function processI18nFile(i18nPath, lang, allSource) {
  const i18n = JSON.parse(fs.readFileSync(i18nPath, "utf8"));
  const keys = Object.keys(i18n);

  const used = {};
  const unused = [];

  let checked = 0;
  const total = keys.length;
  keys.forEach((key) => {
    if (WHITELIST_KEYS.includes(key)) {
      used[key] = i18n[key];
    } else {
      // Only search once per key
      const regex = new RegExp(`["'\`]${key}["'\`]`);
      if (regex.test(allSource)) {
        used[key] = i18n[key];
      } else {
        unused.push(key);
      }
    }
    checked++;
    if (checked % 20 === 0 || checked === total) {
      const percent = ((checked / total) * 100).toFixed(1);
      process.stdout.write(
        `\r[${lang}] Progress: ${checked}/${total} (${percent}%)`,
      );
      if (checked === total) process.stdout.write("\n");
    }
  });

  // Print the unused keys
  console.log(`\n[${lang}] Unused keys:`, unused);

  // Back up the original file
  const oldPath = i18nPath + ".old";
  fs.renameSync(i18nPath, oldPath);

  // Write the trimmed i18n file (keeping the original filename)
  fs.writeFileSync(i18nPath, JSON.stringify(used, null, 2), "utf8");
  console.log(
    `[${lang}] Cleaned i18n file written to src/locales/${path.basename(i18nPath)}`,
  );
  console.log(`[${lang}] Original file backed up as ${path.basename(oldPath)}`);
}

function main() {
  // Supports zhtw.json, zh-tw.json, zh_CN.json, and similar names
  const files = fs
    .readdirSync(LOCALES_DIR)
    .filter((f) => /^[a-z0-9\-_]+\.json$/i.test(f) && !f.endsWith(".old"));
  const allSource = getAllSourceContent();
  files.forEach((file) => {
    const lang = path.basename(file, ".json");
    processI18nFile(path.join(LOCALES_DIR, file), lang, allSource);
  });
}

main();
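The script above counts a key as used only when it appears somewhere in the sources as a quoted literal, so keys assembled at runtime would be reported as unused; that is what the whitelist (theme.light, theme.dark, theme.system, ...) compensates for. A small self-contained sketch of that behaviour, with purely hypothetical source fragments:

// Same detection idea as the script above: a key counts as used only if it
// appears as a quoted literal anywhere in the concatenated sources.
const isKeyUsed = (key: string, allSource: string): boolean =>
  new RegExp(`["'\`]${key}["'\`]`).test(allSource);

// Hypothetical source fragments, for illustration only.
const sourceA = `t("theme.light")`; // literal key, detected
const sourceB = `t(\`theme.\${mode}\`)`; // key built at runtime, missed

console.log(isKeyUsed("theme.light", sourceA)); // true
console.log(isKeyUsed("theme.light", sourceB)); // false, hence the whitelist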
scripts/cleanup-unused-i18n.mjs (new file, 1321 lines; diff suppressed because it is too large)
@@ -1,7 +1,7 @@
import { exec } from "child_process";
import { promisify } from "util";
import fs from "fs/promises";
import path from "path";
import { promisify } from "util";

/**
 * Rename the version number for Alpha releases
scripts/generate-i18n-keys.mjs (new file, 98 lines)
@@ -0,0 +1,98 @@
#!/usr/bin/env node
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const ROOT_DIR = path.resolve(__dirname, "..");
const LOCALE_DIR = path.resolve(ROOT_DIR, "src/locales/en");
const KEY_OUTPUT = path.resolve(ROOT_DIR, "src/types/generated/i18n-keys.ts");
const RESOURCE_OUTPUT = path.resolve(
  ROOT_DIR,
  "src/types/generated/i18n-resources.ts",
);

const isPlainObject = (value) =>
  typeof value === "object" && value !== null && !Array.isArray(value);

const flattenKeys = (data, prefix = "") => {
  const keys = [];
  for (const [key, value] of Object.entries(data)) {
    const nextPrefix = prefix ? `${prefix}.${key}` : key;
    if (isPlainObject(value)) {
      keys.push(...flattenKeys(value, nextPrefix));
    } else {
      keys.push(nextPrefix);
    }
  }
  return keys;
};

const buildType = (data, indent = 0) => {
  if (!isPlainObject(data)) {
    return "string";
  }

  const entries = Object.entries(data).sort(([a], [b]) => a.localeCompare(b));
  const pad = " ".repeat(indent);
  const inner = entries
    .map(([key, value]) => {
      const typeStr = buildType(value, indent + 2);
      return `${" ".repeat(indent + 2)}${JSON.stringify(key)}: ${typeStr};`;
    })
    .join("\n");

  return entries.length
    ? `{
${inner}
${pad}}`
    : "{}";
};

const loadNamespaceJson = async () => {
  const dirents = await fs.readdir(LOCALE_DIR, { withFileTypes: true });
  const namespaces = [];
  for (const dirent of dirents) {
    if (!dirent.isFile() || !dirent.name.endsWith(".json")) continue;
    const name = dirent.name.replace(/\.json$/, "");
    const filePath = path.join(LOCALE_DIR, dirent.name);
    const raw = await fs.readFile(filePath, "utf8");
    const json = JSON.parse(raw);
    namespaces.push({ name, json });
  }
  namespaces.sort((a, b) => a.name.localeCompare(b.name));
  return namespaces;
};

const buildKeysFile = (keys) => {
  const arrayLiteral = keys.map((key) => ` "${key}"`).join(",\n");
  return `// This file is auto-generated by scripts/generate-i18n-keys.mjs\n// Do not edit this file manually.\n\nexport const translationKeys = [\n${arrayLiteral}\n] as const;\n\nexport type TranslationKey = typeof translationKeys[number];\n`;
};

const buildResourcesFile = (namespaces) => {
  const namespaceEntries = namespaces
    .map(({ name, json }) => {
      const typeStr = buildType(json, 4);
      return ` ${JSON.stringify(name)}: ${typeStr};`;
    })
    .join("\n");

  return `// This file is auto-generated by scripts/generate-i18n-keys.mjs\n// Do not edit this file manually.\n\nexport interface TranslationResources {\n translation: {\n${namespaceEntries}\n };\n}\n`;
};

const main = async () => {
  const namespaces = await loadNamespaceJson();
  const keys = namespaces.flatMap(({ name, json }) => flattenKeys(json, name));
  const keysContent = buildKeysFile(keys);
  const resourcesContent = buildResourcesFile(namespaces);
  await fs.mkdir(path.dirname(KEY_OUTPUT), { recursive: true });
  await fs.writeFile(KEY_OUTPUT, keysContent, "utf8");
  await fs.writeFile(RESOURCE_OUTPUT, resourcesContent, "utf8");
  console.log(`Generated ${keys.length} translation keys.`);
};

main().catch((error) => {
  console.error("Failed to generate i18n metadata:", error);
  process.exitCode = 1;
});
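The generated i18n-resources.ts already has the namespace-under-translation shape that i18next's type augmentation expects. A minimal sketch of how it could be wired up, assuming an i18next setup and a declaration file named i18next.d.ts (both the file name and the "@/" alias path are assumptions, not taken from this diff):

// i18next.d.ts (file name assumed): register the generated resource shape
// so translation keys are checked at compile time.
import "i18next";

import type { TranslationResources } from "@/types/generated/i18n-resources"; // alias path assumed

declare module "i18next" {
  interface CustomTypeOptions {
    defaultNS: "translation";
    resources: TranslationResources;
  }
}

With an augmentation like this in place, t() calls are validated against the generated key tree, which matches the "add type-checked translation keys" and "regen typings before formatting" items in the commit message.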
@@ -1,9 +1,10 @@
import fs from "fs";
import fsp from "fs/promises";
import path from "path";
import AdmZip from "adm-zip";
import { createRequire } from "module";
import path from "path";

import { getOctokit, context } from "@actions/github";
import AdmZip from "adm-zip";

const target = process.argv.slice(2)[0];
const alpha = process.argv.slice(2)[1];
@@ -79,11 +80,11 @@ async function resolvePortable() {
    tag,
  });

  let assets = release.assets.filter((x) => {
  const assets = release.assets.filter((x) => {
    return x.name === zipFile;
  });
  if (assets.length > 0) {
    let id = assets[0].id;
    const id = assets[0].id;
    await github.rest.repos.deleteReleaseAsset({
      ...options,
      asset_id: id,
@@ -1,8 +1,9 @@
import fs from "fs";
import path from "path";
import AdmZip from "adm-zip";
import { createRequire } from "module";
import fsp from "fs/promises";
import { createRequire } from "module";
import path from "path";

import AdmZip from "adm-zip";

const target = process.argv.slice(2)[0];
const ARCH_MAP = {
@@ -1,14 +1,16 @@
import AdmZip from "adm-zip";
import { execSync } from "child_process";
import { createHash } from "crypto";
import fs from "fs";
import fsp from "fs/promises";
import path from "path";
import zlib from "zlib";

import AdmZip from "adm-zip";
import { glob } from "glob";
import { HttpsProxyAgent } from "https-proxy-agent";
import fetch from "node-fetch";
import path from "path";
import { extract } from "tar";
import zlib from "zlib";

import { log_debug, log_error, log_info, log_success } from "./utils.mjs";

/**
@@ -55,7 +57,7 @@ const ARCH_MAP = {

const arg1 = process.argv.slice(2)[0];
const arg2 = process.argv.slice(2)[1];
let target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
const target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
const { platform, arch } = target
  ? { platform: PLATFORM_MAP[target], arch: ARCH_MAP[target] }
  : process;
@@ -113,7 +115,7 @@ async function calculateFileHash(filePath) {
    const hashSum = createHash("sha256");
    hashSum.update(fileBuffer);
    return hashSum.digest("hex");
  } catch (err) {
  } catch (ignoreErr) {
    return null;
  }
}
@@ -547,9 +549,9 @@ const resolveServicePermission = async () => {
  const hashCache = await loadHashCache();
  let hasChanges = false;

  for (let f of serviceExecutables) {
  for (const f of serviceExecutables) {
    const files = glob.sync(path.join(resDir, f));
    for (let filePath of files) {
    for (const filePath of files) {
      if (fs.existsSync(filePath)) {
        const currentHash = await calculateFileHash(filePath);
        const cacheKey = `${filePath}_chmod`;
@@ -573,52 +575,29 @@ const resolveServicePermission = async () => {
  }
};

// resolve locales (copy from src/locales to resources/locales, with hash-based change checks)
async function resolveLocales() {
  const srcLocalesDir = path.join(cwd, "src/locales");
  const targetLocalesDir = path.join(cwd, "src-tauri/resources/locales");

  try {
    await fsp.mkdir(targetLocalesDir, { recursive: true });
    const files = await fsp.readdir(srcLocalesDir);
    for (const file of files) {
      const srcPath = path.join(srcLocalesDir, file);
      const targetPath = path.join(targetLocalesDir, file);
      if (!(await hasFileChanged(srcPath, targetPath))) continue;
      await fsp.copyFile(srcPath, targetPath);
      await updateHashCache(targetPath);
      log_success(`Copied locale file: ${file}`);
    }
    log_success("All locale files processed successfully");
  } catch (err) {
    log_error("Error copying locale files:", err.message);
    throw err;
  }
}

// =======================
// Other resource resolvers (service, mmdb, geosite, geoip, enableLoopback, sysproxy)
// =======================
const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service-ipc/releases/download/${SIDECAR_HOST}`;
const resolveService = () => {
  let ext = platform === "win32" ? ".exe" : "";
  let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  const ext = platform === "win32" ? ".exe" : "";
  const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  return resolveResource({
    file: "clash-verge-service" + suffix + ext,
    downloadURL: `${SERVICE_URL}/clash-verge-service${ext}`,
  });
};
const resolveInstall = () => {
  let ext = platform === "win32" ? ".exe" : "";
  let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  const ext = platform === "win32" ? ".exe" : "";
  const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  return resolveResource({
    file: "clash-verge-service-install" + suffix + ext,
    downloadURL: `${SERVICE_URL}/clash-verge-service-install${ext}`,
  });
};
const resolveUninstall = () => {
  let ext = platform === "win32" ? ".exe" : "";
  let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  const ext = platform === "win32" ? ".exe" : "";
  const suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
  return resolveResource({
    file: "clash-verge-service-uninstall" + suffix + ext,
    downloadURL: `${SERVICE_URL}/clash-verge-service-uninstall${ext}`,
@@ -715,7 +694,6 @@ const tasks = [
    retry: 5,
    macosOnly: true,
  },
  { name: "locales", func: resolveLocales, retry: 2 },
];

async function runTask() {
@@ -30,10 +30,11 @@
 */

import { execSync } from "child_process";
import { program } from "commander";
import fs from "fs/promises";
import path from "path";

import { program } from "commander";

/**
 * Get the current short git commit hash
 * @returns {string}
@@ -1,5 +1,7 @@
import axios from "axios";
import { readFileSync } from "fs";

import axios from "axios";

import { log_error, log_info, log_success } from "./utils.mjs";

const CHAT_ID_RELEASE = "@clash_verge_re"; // official release channel
@@ -58,7 +58,7 @@ export async function resolveUpdateLogDefault() {
  const reEnd = /^---/;

  let isCapturing = false;
  let content = [];
  const content = [];
  let firstTag = "";

  for (const line of data.split("\n")) {
@@ -1,5 +1,6 @@
import fetch from "node-fetch";
import { getOctokit, context } from "@actions/github";
import fetch from "node-fetch";

import { resolveUpdateLog } from "./updatelog.mjs";

const UPDATE_TAG_NAME = "updater";
@@ -113,7 +114,7 @@ async function resolveUpdater() {
  });

  // delete the old assets
  for (let asset of updateRelease.assets) {
  for (const asset of updateRelease.assets) {
    if (asset.name === UPDATE_JSON_FILE) {
      await github.rest.repos.deleteReleaseAsset({
        ...options,
@@ -1,5 +1,6 @@
import fetch from "node-fetch";
import { getOctokit, context } from "@actions/github";
import fetch from "node-fetch";

import { resolveUpdateLog, resolveUpdateLogDefault } from "./updatelog.mjs";

// Add stable update JSON filenames
@@ -259,7 +260,7 @@ async function processRelease(github, options, tag, isAlpha) {
  const proxyFile = isAlpha ? ALPHA_UPDATE_JSON_PROXY : UPDATE_JSON_PROXY;

  // Delete existing assets with these names
  for (let asset of updateRelease.assets) {
  for (const asset of updateRelease.assets) {
    if (asset.name === jsonFile) {
      await github.rest.repos.deleteReleaseAsset({
        ...options,