-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.py
More file actions
67 lines (49 loc) · 1.64 KB
/
config.py
File metadata and controls
67 lines (49 loc) · 1.64 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import json
import os
# All persistent state lives under a hidden directory in the user's home folder.
CONFIG_DIR = os.path.expanduser("~/.mini-openclaw")
os.makedirs(CONFIG_DIR, exist_ok=True)
# Plain-text file holding the owner's chat id (see get/save_owner_chat_id below).
OWNER_FILE = os.path.join(CONFIG_DIR, "owner.txt")
# JSON file mapping "<platform>_<user_id>" keys to a chosen model name.
USER_MODEL_FILE = os.path.join(CONFIG_DIR, "user_models.json")
# Models a user may select; DEFAULT_MODEL below must be one of these.
AVAILABLE_MODELS = [
    "qwen3.5-plus",
    "qwen3-max-2026-01-23",
    "qwen3-coder-next",
    "qwen3-coder-plus",
    "kimi-k2.5",
    "glm-5",
    "glm-4.7",
    "MiniMax-M2.5",
]
DEFAULT_MODEL = "qwen3.5-plus"
# All models support deep thinking.
# NOTE(review): this is an alias of the same list object, not a copy —
# mutating either name affects both.
THINKING_MODELS = AVAILABLE_MODELS
# Dedicated model used for intent classification.
INTENT_MODEL = "glm-4.7"
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10 MiB upload size limit
# Directory where uploaded files are stored.
UPLOAD_DIR = os.path.join(CONFIG_DIR, "uploads")
os.makedirs(UPLOAD_DIR, exist_ok=True)
def get_owner_chat_id() -> str | None:
    """Return the saved owner chat id, or None if none has been saved yet.

    Uses EAFP (try/except FileNotFoundError) instead of an exists()
    pre-check, which avoids the check-then-read race where the file is
    removed between the two calls.
    """
    try:
        with open(OWNER_FILE, "r", encoding="utf-8") as f:
            return f.read().strip()
    except FileNotFoundError:
        return None
def save_owner_chat_id(chat_id: str):
    """Persist the owner's chat id to OWNER_FILE so it survives restarts.

    The id is coerced to str so numeric chat ids are accepted as well.
    An explicit UTF-8 encoding keeps this consistent with the JSON
    helpers, instead of depending on the platform default encoding.
    """
    with open(OWNER_FILE, "w", encoding="utf-8") as f:
        f.write(str(chat_id))
def _load_user_models() -> dict:
    """Load the per-user model mapping from disk.

    Returns an empty dict when the file does not exist yet. Uses EAFP
    (FileNotFoundError) rather than os.path.exists() to avoid a
    check-then-read race. A corrupt file still raises
    json.JSONDecodeError deliberately, so bad data is not silently
    overwritten on the next save.
    """
    try:
        with open(USER_MODEL_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        return {}
def _save_user_models(data: dict):
    """Write the per-user model mapping to USER_MODEL_FILE as readable JSON."""
    # Serialize first, then write in one call; non-ASCII model names are
    # stored verbatim rather than as \uXXXX escapes.
    serialized = json.dumps(data, ensure_ascii=False, indent=2)
    with open(USER_MODEL_FILE, "w", encoding="utf-8") as f:
        f.write(serialized)
def get_user_model(platform: str, user_id: str) -> str:
    """Return the model this user selected, or DEFAULT_MODEL if none is stored."""
    stored = _load_user_models()
    return stored.get(f"{platform}_{user_id}", DEFAULT_MODEL)
def save_user_model(platform: str, user_id: str, model: str):
    """Record the model chosen by a user and persist the mapping immediately."""
    mapping = _load_user_models()
    mapping[f"{platform}_{user_id}"] = model
    _save_user_models(mapping)