# LLM API — call LLMs through a Cloudflare AI Gateway
import re
from typing import Any, Union

import httpx
from retry import retry


def find_value_recursive(key: str, data: Union[dict, list]) -> Any:
    """Depth-first search for *key* in arbitrarily nested dicts/lists.

    Returns the first non-None value found for *key*, or None if the key
    is absent (or every match holds None — callers cannot distinguish).
    Non-dict/list inputs simply yield None.
    """
    if isinstance(data, dict):
        if key in data:
            return data[key]
        # Recurse into the values of every other key.
        for value in data.values():
            result = find_value_recursive(key, value)
            if result is not None:
                return result
    elif isinstance(data, list):
        for item in data:
            result = find_value_recursive(key, item)
            if result is not None:
                return result
    # Explicit fall-through: key not found anywhere in this subtree.
    return None


class LLMChat:
    """ComfyUI node: send a single-turn prompt to an LLM via the gateway."""
    # NOTE(review): original docstring said "AWS S3下载" (AWS S3 download),
    # which was a copy-paste error — this class performs LLM chat completions.

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "llm_provider": ([
                    "claude-3-5-sonnet-20241022-v2",
                    "claude-3-5-sonnet-20241022-v3",
                    "claude-3-7-sonnet-20250219-v1",
                    "claude-4-sonnet-20250514-v1",
                    "gpt-4o-1120",
                    "gpt-4.1",
                    "deepseek-v3",
                    "deepseek-r1",
                ],),
                "prompt": ("STRING", {"multiline": True}),
                "temperature": ("FLOAT", {"default": 0.7, "min": 0.0, "max": 1.0}),
                "max_tokens": ("INT", {"default": 4096, "min": 1, "max": 65535}),
                "timeout": ("INT", {"default": 120, "min": 30, "max": 900}),
            }
        }

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("llm输出",)
    FUNCTION = "chat"
    CATEGORY = "不忘科技-自定义节点🚩/llm"

    def chat(self, llm_provider: str, prompt: str, temperature: float,
             max_tokens: int, timeout: int):
        """Call the chat-completions endpoint and return the reply text.

        Retries up to 3 times (1s delay) on any exception; wraps failures
        in a single Exception so the node surface stays uniform.
        Returns a 1-tuple of the reply string (ComfyUI convention).
        """

        @retry(Exception, tries=3, delay=1)
        def _chat():
            try:
                # `timeout` bounds the whole request; connect is capped at 15s.
                with httpx.Client(timeout=httpx.Timeout(timeout, connect=15)) as session:
                    resp = session.post(
                        "https://gateway.bowong.cc/chat/completions",
                        headers={
                            "Content-Type": "application/json",
                            "Accept": "application/json",
                            "Authorization": "Bearer auth-bowong7777",
                        },
                        json={
                            "model": llm_provider,
                            "messages": [
                                {"role": "user", "content": prompt},
                            ],
                            "temperature": temperature,
                            "max_tokens": max_tokens,
                        },
                    )
                    resp.raise_for_status()
                    payload = resp.json()
                    # Response schemas differ per provider; dig out the first
                    # "content" field wherever it lives in the JSON tree.
                    content = find_value_recursive("content", payload)
                    if not isinstance(content, str):
                        # Previously this fell through to re.sub(None) and
                        # raised an opaque TypeError; fail with a clear message.
                        raise ValueError("no 'content' field in response: {}".format(payload))
                    # Collapse runs of blank lines into single newlines.
                    content = re.sub(r'\n{2,}', '\n', content)
            except Exception as e:
                # Chain the cause so the original traceback is preserved.
                raise Exception("llm调用失败 {}".format(e)) from e
            return (content,)

        return _chat()