Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 41 additions & 10 deletions agentmain.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
elif hasattr(sys.stderr, 'reconfigure'): sys.stderr.reconfigure(errors='replace')
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from llmcore import LLMSession, ToolClient, ClaudeSession, MixinSession, NativeToolClient, NativeClaudeSession, NativeOAISession
from llmcore import LLMSession, ToolClient, ClaudeSession, MixinSession, NativeToolClient, NativeClaudeSession, NativeOAISession, reload_mykeys, reload_mykeys_if_changed
from agent_loop import agent_runner_loop
from ga import GenericAgentHandler, smart_format, get_global_memory, format_error, consume_file

Expand Down Expand Up @@ -47,7 +47,20 @@ class GeneraticAgent:
def __init__(self):
    """Initialize the agent: create the temp workspace, set up run state,
    and build the LLM client list from the current mykeys config."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    os.makedirs(os.path.join(script_dir, 'temp'), exist_ok=True)
    # Concurrency / task bookkeeping.
    self.lock = threading.Lock()
    self.task_dir = None
    self.history = []
    self.task_queue = queue.Queue()
    self.is_running = False
    self.stop_sig = False
    # LLM selection state: llm_no indexes into self.llmclients.
    self.llm_no = 0
    self.inc_out = False
    self.handler = None
    self.verbose = True
    # Build clients from mykeys (loaded inside the helper); activate the first.
    self._build_llm_clients()
    self.llmclient = self.llmclients[self.llm_no]

def _build_llm_clients(self, mykeys=None):
"""从 mykeys 字典构建 llmclients 列表。"""
if mykeys is None:
from llmcore import mykeys
llm_sessions = []
for k, cfg in mykeys.items():
if not any(x in k for x in ['api', 'config', 'cookie']): continue
Expand All @@ -66,16 +79,32 @@ def __init__(self):
else: llm_sessions[i] = ToolClient(mixin)
except Exception as e: print(f'[WARN] Failed to init MixinSession with cfg {s["mixin_cfg"]}: {e}')
self.llmclients = llm_sessions
self.lock = threading.Lock()
self.task_dir = None
self.history = []
self.task_queue = queue.Queue()
self.is_running = False; self.stop_sig = False
self.llm_no = 0; self.inc_out = False
self.handler = None; self.verbose = True

def reload_llm_configs(self, force=False):
    """Hot-reload mykey.py and rebuild every LLM client.

    When ``force`` is False, reload only if the key file changed on disk;
    when True, always reload.

    Returns:
        list of (index, client name, is_current) tuples, one per client.

    Raises:
        RuntimeError: if the reloaded config yields no usable clients.
    """
    if force:
        mk = reload_mykeys()
    else:
        changed, mk = reload_mykeys_if_changed()
        if not changed:
            # Nothing changed on disk; report the current clients unchanged.
            return [(i, self.get_llm_name(b), i == self.llm_no)
                    for i, b in enumerate(self.llmclients)]
    # Preserve the active conversation across the rebuild when possible.
    old_history = None
    try:
        old_history = self.llmclient.backend.history
    except AttributeError:
        pass  # current client exposes no backend/history; nothing to carry over
    self._build_llm_clients(mk)
    if not self.llmclients:
        raise RuntimeError('重载后没有可用的 LLM 配置')
    # Keep the selected index valid if the client list shrank.
    self.llm_no = min(self.llm_no, len(self.llmclients) - 1)
    self.llmclient = self.llmclients[self.llm_no]
    if old_history:
        try:
            self.llmclient.backend.history = old_history
        except AttributeError:
            pass  # new client cannot accept the saved history; start fresh
    print(f'[HotReload] LLM configs reloaded, {len(self.llmclients)} clients available.')
    return [(i, self.get_llm_name(b), i == self.llm_no)
            for i, b in enumerate(self.llmclients)]

def next_llm(self, n=-1):
self.reload_llm_configs()
self.llm_no = ((self.llm_no + 1) if n < 0 else n) % len(self.llmclients)
lastc = self.llmclient
self.llmclient = self.llmclients[self.llm_no]
Expand All @@ -85,7 +114,9 @@ def next_llm(self, n=-1):
name = self.get_llm_name(model=True)
if 'glm' in name or 'minimax' in name or 'kimi' in name: load_tool_schema('_cn')
else: load_tool_schema()
def list_llms(self): return [(i, self.get_llm_name(b), i == self.llm_no) for i, b in enumerate(self.llmclients)]
def list_llms(self):
    """Refresh configs if the key file changed, then describe every client.

    Returns:
        list of (index, client name, is_current) tuples.
    """
    self.reload_llm_configs()
    active = self.llm_no
    return [(idx, self.get_llm_name(client), idx == active)
            for idx, client in enumerate(self.llmclients)]
def get_llm_name(self, b=None, model=False):
b = self.llmclient if b is None else b
if isinstance(b, dict): return 'BADCONFIG_MIXIN'
Expand Down
8 changes: 8 additions & 0 deletions frontends/stapp.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,14 @@ def render_sidebar():
st.caption(f"空闲时间:{int(time.time()) - last_reply_time}秒", help="当超过30分钟未收到回复时,系统会自动任务")
if st.button("切换备用链路"):
agent.next_llm(); st.rerun(scope="fragment")
if st.button("🔄 重载 LLM 配置"):
try:
result = agent.reload_llm_configs(force=True)
names = ', '.join(name for _, name, _ in result)
st.toast(f"✅ 已重载,{len(result)} 个 LLM: {names}")
except Exception as e:
st.toast(f"❌ 重载失败: {e}")
st.rerun(scope="fragment")
if st.button("强行停止任务"):
agent.abort(); st.toast("已发送停止信号"); st.rerun()
if st.button("重新注入工具"):
Expand Down
32 changes: 32 additions & 0 deletions llmcore.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,38 @@ def _load_mykeys():
if not os.path.exists(p): raise Exception('[ERROR] mykey.py or mykey.json not found, please create one from mykey_template.')
with open(p, encoding='utf-8') as f: return json.load(f)

def reload_mykeys():
    """Force-reload mykey.py / mykey.json and refresh the module-level
    ``mykeys`` and ``proxies`` caches.

    Returns:
        dict: the freshly loaded key configuration.
    """
    import importlib
    try:
        import mykey
        # Drop the cached module so _load_mykeys sees the new file contents.
        importlib.reload(mykey)
    except ImportError:
        pass  # mykey.py absent — the JSON fallback in _load_mykeys still applies
    mk = _load_mykeys()
    proxy = mk.get("proxy", 'http://127.0.0.1:2082')
    px = {"http": proxy, "https": proxy} if proxy else None
    globals().update(mykeys=mk, proxies=px)
    # Record the current file mtime so reload_mykeys_if_changed() does not
    # redundantly reload right after a forced reload.
    global _mykey_mtime
    _mykey_mtime = _get_mykey_mtime()
    return mk

_mykey_mtime = 0

def _get_mykey_mtime():
for name in ('mykey.py', 'mykey.json'):
p = os.path.join(os.path.dirname(os.path.abspath(__file__)), name)
if os.path.exists(p):
return os.path.getmtime(p)
return 0

def reload_mykeys_if_changed():
    """Reload the mykey config only when the file on disk was modified.

    Returns:
        tuple: (reloaded, mykeys) where ``reloaded`` tells whether a fresh
        load actually happened.
    """
    global _mykey_mtime
    current = _get_mykey_mtime()
    if current <= _mykey_mtime:
        # File unchanged: serve the cached dict, loading lazily on first use.
        cached = globals().get('mykeys')
        return False, cached or _load_mykeys()
    _mykey_mtime = current
    return True, reload_mykeys()

def __getattr__(name): # once guard in PEP 562
if name in ('mykeys', 'proxies'):
mk = _load_mykeys()
Expand Down